rapidsai_public_repos/cudf/cpp/src/text/replace.cu
/*
 * Copyright (c) 2020-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <text/utilities/tokenize_ops.cuh>

#include <nvtext/detail/tokenize.hpp>
#include <nvtext/replace.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/detail/null_mask.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/strings/detail/strings_children.cuh>
#include <cudf/strings/detail/utilities.cuh>
#include <cudf/strings/string_view.cuh>
#include <cudf/strings/strings_column_view.hpp>
#include <cudf/utilities/default_stream.hpp>
#include <cudf/utilities/error.hpp>

#include <rmm/cuda_stream_view.hpp>

#include <thrust/distance.h>
#include <thrust/execution_policy.h>
#include <thrust/find.h>
#include <thrust/pair.h>

namespace nvtext {
namespace detail {
namespace {

using replace_result = thrust::pair<bool, cudf::string_view>;

struct base_token_replacer_fn {
  cudf::column_device_view const d_strings;  ///< strings to tokenize
  cudf::string_view const d_delimiter;       ///< delimiter characters for tokenizing
  cudf::size_type* d_offsets{};              ///< for locating output string in d_chars
  char* d_chars{};                           ///< output buffer

  /**
   * @brief Tokenizes each string and calls the provided `replacer` function
   * for each token.
   *
   * @tparam ReplaceFn Should accept a `string_view` and return a `replace_result`
   * @param idx Index of the current string to process
   * @param replacer Function to call for each token to determine its replacement
   */
  template <typename ReplaceFn>
  __device__ void process_string(cudf::size_type idx, ReplaceFn replacer)
  {
    if (d_strings.is_null(idx)) {
      if (!d_chars) d_offsets[idx] = 0;
      return;
    }

    auto const d_str  = d_strings.element<cudf::string_view>(idx);
    auto const in_ptr = d_str.data();
    auto out_ptr      = d_chars ? d_chars + d_offsets[idx] : nullptr;
    auto nbytes       = d_str.size_bytes();  // count the output bytes
    auto last_pos     = cudf::size_type{0};
    auto tokenizer    = characters_tokenizer{d_str, d_delimiter};
    // process each token
    while (tokenizer.next_token()) {
      auto const token_pos = tokenizer.token_byte_positions();
      auto const token     = cudf::string_view{d_str.data() + token_pos.first,
                                               token_pos.second - token_pos.first};
      // ask replacer if this token should be replaced
      auto const result = replacer(token);
      if (result.first) {  // first == replace indicator, second == new string
        auto d_replacement = result.second;
        nbytes += d_replacement.size_bytes() - token.size_bytes();
        if (out_ptr) {
          // copy over string up to the token location
          out_ptr = cudf::strings::detail::copy_and_increment(
            out_ptr, in_ptr + last_pos, token_pos.first - last_pos);
          // copy over replacement string
          out_ptr  = cudf::strings::detail::copy_string(out_ptr, d_replacement);
          last_pos = token_pos.second;  // update last byte position for this string
        }
      }
    }

    // copy the remainder of the string's bytes to the output buffer
    if (out_ptr)
      memcpy(out_ptr, in_ptr + last_pos, d_str.size_bytes() - last_pos);
    else
      d_offsets[idx] = nbytes;
  }
};

using strings_iterator = cudf::column_device_view::const_iterator<cudf::string_view>;

/**
 * @brief Functor to replace tokens in each string.
 *
 * This tokenizes a string using the given d_delimiter and replaces any tokens that match
 * a string in d_targets_begin/end with those from the d_replacements column.
 * Strings with no matching tokens are left unchanged.
 *
 * This should be called first to compute the size of each output string and then a second
 * time to fill in the allocated output buffer for each string.
 */
struct replace_tokens_fn : base_token_replacer_fn {
  strings_iterator d_targets_begin;  ///< strings to search for
  strings_iterator d_targets_end;
  cudf::column_device_view const d_replacements;  ///< replacement strings

  replace_tokens_fn(cudf::column_device_view const& d_strings,
                    cudf::string_view const& d_delimiter,
                    strings_iterator d_targets_begin,
                    strings_iterator d_targets_end,
                    cudf::column_device_view const& d_replacements)
    : base_token_replacer_fn{d_strings, d_delimiter},
      d_targets_begin{d_targets_begin},
      d_targets_end{d_targets_end},
      d_replacements{d_replacements}
  {
  }

  /**
   * @brief Return replacement string for the given token.
   *
   * @param token Token candidate to be replaced.
   * @return result pair specifies replacement condition and new string
   */
  __device__ replace_result token_replacement(cudf::string_view const& token)
  {
    // check if the token matches any of the targets
    auto const found_itr = thrust::find(thrust::seq, d_targets_begin, d_targets_end, token);
    if (found_itr != d_targets_end) {  // match found
      // retrieve the corresponding replacement string or
      // if only one repl string, use that one for all targets
      auto const d_repl = [&] {
        auto const repl_idx = thrust::distance(d_targets_begin, found_itr);
        return d_replacements.size() == 1 ? d_replacements.element<cudf::string_view>(0)
                                          : d_replacements.element<cudf::string_view>(repl_idx);
      }();
      return replace_result{true, d_repl};
    }
    // otherwise, do not replace this token
    return replace_result{false, cudf::string_view()};
  }

  __device__ void operator()(cudf::size_type idx)
  {
    process_string(
      idx, [this] __device__(cudf::string_view const& token) { return token_replacement(token); });
  }
};

/**
 * @brief Functor to filter tokens in each string.
 *
 * This tokenizes a string using the given d_delimiter and replaces any tokens
 * that are shorter than min_token_length with a replacement string.
 *
 * This should be called first to compute the size of each output string and then
 * a second time to fill in the allocated output buffer for each string.
 */
struct remove_small_tokens_fn : base_token_replacer_fn {
  cudf::size_type min_token_length;       ///< minimum size for found tokens
  cudf::string_view const d_replacement;  ///< replacement string

  remove_small_tokens_fn(cudf::column_device_view const& d_strings,
                         cudf::string_view const& d_delimiter,
                         cudf::size_type min_token_length,
                         cudf::string_view const& d_replacement)
    : base_token_replacer_fn{d_strings, d_delimiter},
      min_token_length{min_token_length},
      d_replacement{d_replacement}
  {
  }

  __device__ void operator()(cudf::size_type idx)
  {
    auto replacer = [this] __device__(cudf::string_view const& token) {
      return replace_result{token.length() < min_token_length, d_replacement};
    };
    process_string(idx, replacer);
  }
};

}  // namespace

// detail APIs

std::unique_ptr<cudf::column> replace_tokens(cudf::strings_column_view const& strings,
                                             cudf::strings_column_view const& targets,
                                             cudf::strings_column_view const& replacements,
                                             cudf::string_scalar const& delimiter,
                                             rmm::cuda_stream_view stream,
                                             rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(!targets.has_nulls(), "Parameter targets must not have nulls");
  CUDF_EXPECTS(!replacements.has_nulls(), "Parameter replacements must not have nulls");
  if (replacements.size() != 1)
    CUDF_EXPECTS(replacements.size() == targets.size(),
                 "Parameter targets and replacements must be the same size");
  CUDF_EXPECTS(delimiter.is_valid(stream), "Parameter delimiter must be valid");

  cudf::size_type const strings_count = strings.size();
  if (strings_count == 0) return cudf::make_empty_column(cudf::data_type{cudf::type_id::STRING});

  auto strings_column      = cudf::column_device_view::create(strings.parent(), stream);
  auto targets_column      = cudf::column_device_view::create(targets.parent(), stream);
  auto replacements_column = cudf::column_device_view::create(replacements.parent(), stream);
  cudf::string_view d_delimiter(delimiter.data(), delimiter.size());
  replace_tokens_fn replacer{*strings_column,
                             d_delimiter,
                             targets_column->begin<cudf::string_view>(),
                             targets_column->end<cudf::string_view>(),
                             *replacements_column};

  // copy null mask from input column
  rmm::device_buffer null_mask = cudf::detail::copy_bitmask(strings.parent(), stream, mr);

  // this utility calls replacer to build the offsets and chars columns
  auto children = cudf::strings::detail::make_strings_children(replacer, strings_count, stream, mr);

  // return new strings column
  return cudf::make_strings_column(strings_count,
                                   std::move(children.first),
                                   std::move(children.second),
                                   strings.null_count(),
                                   std::move(null_mask));
}

std::unique_ptr<cudf::column> filter_tokens(cudf::strings_column_view const& strings,
                                            cudf::size_type min_token_length,
                                            cudf::string_scalar const& replacement,
                                            cudf::string_scalar const& delimiter,
                                            rmm::cuda_stream_view stream,
                                            rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(replacement.is_valid(stream), "Parameter replacement must be valid");
  CUDF_EXPECTS(delimiter.is_valid(stream), "Parameter delimiter must be valid");

  cudf::size_type const strings_count = strings.size();
  if (strings_count == 0) return cudf::make_empty_column(cudf::data_type{cudf::type_id::STRING});

  auto strings_column = cudf::column_device_view::create(strings.parent(), stream);
  cudf::string_view d_replacement(replacement.data(), replacement.size());
  cudf::string_view d_delimiter(delimiter.data(), delimiter.size());
  remove_small_tokens_fn filterer{*strings_column, d_delimiter, min_token_length, d_replacement};

  // copy null mask from input column
  rmm::device_buffer null_mask = cudf::detail::copy_bitmask(strings.parent(), stream, mr);

  // this utility calls filterer to build the offsets and chars columns
  auto children = cudf::strings::detail::make_strings_children(filterer, strings_count, stream, mr);

  // return new strings column
  return cudf::make_strings_column(strings_count,
                                   std::move(children.first),
                                   std::move(children.second),
                                   strings.null_count(),
                                   std::move(null_mask));
}

}  // namespace detail

// external APIs

std::unique_ptr<cudf::column> replace_tokens(cudf::strings_column_view const& input,
                                             cudf::strings_column_view const& targets,
                                             cudf::strings_column_view const& replacements,
                                             cudf::string_scalar const& delimiter,
                                             rmm::cuda_stream_view stream,
                                             rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::replace_tokens(input, targets, replacements, delimiter, stream, mr);
}

std::unique_ptr<cudf::column> filter_tokens(cudf::strings_column_view const& input,
                                            cudf::size_type min_token_length,
                                            cudf::string_scalar const& replacement,
                                            cudf::string_scalar const& delimiter,
                                            rmm::cuda_stream_view stream,
                                            rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::filter_tokens(input, min_token_length, replacement, delimiter, stream, mr);
}

}  // namespace nvtext
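A minimal host-side sketch of calling the two public entry points defined in replace.cu above. It is not part of the file: the calls mirror the external API signatures shown here, while cudf::test::strings_column_wrapper (from cudf's test utilities) and the example values are illustrative assumptions.

// Usage sketch (assumed helper: cudf::test::strings_column_wrapper for quick column construction)
#include <cudf_test/column_wrapper.hpp>
#include <nvtext/replace.hpp>

void replace_tokens_example(rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr)
{
  cudf::test::strings_column_wrapper input({"the fox jumped over", "the dog sat"});
  cudf::test::strings_column_wrapper targets({"fox", "dog"});
  cudf::test::strings_column_wrapper repls({"cat"});  // size 1: reused for every target

  // replace "fox" and "dog" with "cat"; tokens are split on the space delimiter
  auto replaced = nvtext::replace_tokens(cudf::strings_column_view(input),
                                         cudf::strings_column_view(targets),
                                         cudf::strings_column_view(repls),
                                         cudf::string_scalar(" "),
                                         stream,
                                         mr);
  // drop tokens shorter than 4 characters (each is replaced with the empty string)
  auto filtered = nvtext::filter_tokens(
    cudf::strings_column_view(input), 4, cudf::string_scalar(""), cudf::string_scalar(" "), stream, mr);
}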
rapidsai_public_repos/cudf/cpp/src/text/minhash.cu
/*
 * Copyright (c) 2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <nvtext/minhash.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/detail/copy.hpp>
#include <cudf/detail/null_mask.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/detail/sequence.hpp>
#include <cudf/detail/utilities/cuda.cuh>
#include <cudf/hashing/detail/hashing.hpp>
#include <cudf/hashing/detail/murmurhash3_x64_128.cuh>
#include <cudf/hashing/detail/murmurhash3_x86_32.cuh>
#include <cudf/strings/string_view.cuh>
#include <cudf/utilities/default_stream.hpp>
#include <cudf/utilities/error.hpp>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/exec_policy.hpp>

#include <thrust/execution_policy.h>
#include <thrust/fill.h>

#include <limits>

#include <cuda/atomic>

namespace nvtext {
namespace detail {
namespace {

/**
 * @brief Compute the minhash of each string for each seed
 *
 * This is a warp-per-string algorithm where parallel threads within a warp
 * work on substrings of a single string row.
 *
 * @tparam HashFunction hash function to use on each substring
 *
 * @param d_strings Strings column to process
 * @param seeds Seeds for hashing each string
 * @param width Substring window size in characters
 * @param d_hashes Minhash output values for each string
 */
template <typename HashFunction,
          typename hash_value_type = std::conditional_t<
            std::is_same_v<typename HashFunction::result_type, uint32_t>, uint32_t, uint64_t>>
__global__ void minhash_kernel(cudf::column_device_view const d_strings,
                               cudf::device_span<hash_value_type const> seeds,
                               cudf::size_type width,
                               hash_value_type* d_hashes)
{
  auto const idx = static_cast<std::size_t>(threadIdx.x + blockIdx.x * blockDim.x);
  if (idx >= (static_cast<std::size_t>(d_strings.size()) *
              static_cast<std::size_t>(cudf::detail::warp_size))) {
    return;
  }

  auto const str_idx  = static_cast<cudf::size_type>(idx / cudf::detail::warp_size);
  auto const lane_idx = static_cast<cudf::size_type>(idx % cudf::detail::warp_size);

  if (d_strings.is_null(str_idx)) { return; }

  auto const d_str    = d_strings.element<cudf::string_view>(str_idx);
  auto const d_output = d_hashes + (str_idx * seeds.size());

  // initialize hashes output for this string
  if (lane_idx == 0) {
    auto const init = d_str.empty() ? 0 : std::numeric_limits<hash_value_type>::max();
    thrust::fill(thrust::seq, d_output, d_output + seeds.size(), init);
  }
  __syncwarp();

  auto const begin = d_str.data() + lane_idx;
  auto const end   = d_str.data() + d_str.size_bytes();

  // each lane hashes 'width' substrings of d_str
  for (auto itr = begin; itr < end; itr += cudf::detail::warp_size) {
    if (cudf::strings::detail::is_utf8_continuation_char(*itr)) { continue; }
    auto const check_str =  // used for counting 'width' characters
      cudf::string_view(itr, static_cast<cudf::size_type>(thrust::distance(itr, end)));
    auto const [bytes, left] = cudf::strings::detail::bytes_to_character_position(check_str, width);
    if ((itr != d_str.data()) && (left > 0)) { continue; }  // true if past the end of the string

    auto const hash_str = cudf::string_view(itr, bytes);

    // hashing with each seed on the same section of the string is 10x faster than
    // computing the substrings for each seed
    for (std::size_t seed_idx = 0; seed_idx < seeds.size(); ++seed_idx) {
      auto const hasher = HashFunction(seeds[seed_idx]);
      // hash substring and store the min value
      if constexpr (std::is_same_v<hash_value_type, uint32_t>) {
        auto const hvalue = hasher(hash_str);
        cuda::atomic_ref<hash_value_type, cuda::thread_scope_block> ref{*(d_output + seed_idx)};
        ref.fetch_min(hvalue, cuda::std::memory_order_relaxed);
      } else {
        // This code path assumes the use of MurmurHash3_x64_128 which produces 2 uint64 values
        // but only uses the first uint64 value as requested by the LLM team.
        auto const hvalue = thrust::get<0>(hasher(hash_str));
        cuda::atomic_ref<hash_value_type, cuda::thread_scope_block> ref{*(d_output + seed_idx)};
        ref.fetch_min(hvalue, cuda::std::memory_order_relaxed);
      }
    }
  }
}

template <typename HashFunction,
          typename hash_value_type = std::conditional_t<
            std::is_same_v<typename HashFunction::result_type, uint32_t>, uint32_t, uint64_t>>
std::unique_ptr<cudf::column> minhash_fn(cudf::strings_column_view const& input,
                                         cudf::device_span<hash_value_type const> seeds,
                                         cudf::size_type width,
                                         rmm::cuda_stream_view stream,
                                         rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(!seeds.empty(), "Parameter seeds cannot be empty", std::invalid_argument);
  CUDF_EXPECTS(width >= 2,
               "Parameter width should be an integer value of 2 or greater",
               std::invalid_argument);
  CUDF_EXPECTS((static_cast<std::size_t>(input.size()) * seeds.size()) <
                 static_cast<std::size_t>(std::numeric_limits<cudf::size_type>::max()),
               "The number of seeds times the number of input rows exceeds the column size limit",
               std::overflow_error);

  auto const output_type = cudf::data_type{cudf::type_to_id<hash_value_type>()};
  if (input.is_empty()) { return cudf::make_empty_column(output_type); }

  auto const d_strings = cudf::column_device_view::create(input.parent(), stream);

  auto hashes =
    cudf::make_numeric_column(output_type,
                              input.size() * static_cast<cudf::size_type>(seeds.size()),
                              cudf::mask_state::UNALLOCATED,
                              stream,
                              mr);
  auto d_hashes = hashes->mutable_view().data<hash_value_type>();

  constexpr int block_size = 256;
  cudf::detail::grid_1d grid{input.size() * cudf::detail::warp_size, block_size};
  minhash_kernel<HashFunction><<<grid.num_blocks, grid.num_threads_per_block, 0, stream.value()>>>(
    *d_strings, seeds, width, d_hashes);

  return hashes;
}

std::unique_ptr<cudf::column> build_list_result(cudf::strings_column_view const& input,
                                                std::unique_ptr<cudf::column>&& hashes,
                                                cudf::size_type seeds_size,
                                                rmm::cuda_stream_view stream,
                                                rmm::mr::device_memory_resource* mr)
{
  // build the offsets for the output lists column
  auto const zero = cudf::numeric_scalar<cudf::size_type>(0);
  auto const size = cudf::numeric_scalar<cudf::size_type>(seeds_size);
  auto offsets    = cudf::detail::sequence(input.size() + 1, zero, size, stream, mr);
  hashes->set_null_mask(rmm::device_buffer{}, 0);  // children have no nulls

  // build the lists column from the offsets and the hashes
  auto result = make_lists_column(input.size(),
                                  std::move(offsets),
                                  std::move(hashes),
                                  input.null_count(),
                                  cudf::detail::copy_bitmask(input.parent(), stream, mr),
                                  stream,
                                  mr);
  // expect this condition to be very rare
  if (input.null_count() > 0) {
    result = cudf::detail::purge_nonempty_nulls(result->view(), stream, mr);
  }
  return result;
}

}  // namespace

std::unique_ptr<cudf::column> minhash(cudf::strings_column_view const& input,
                                      cudf::numeric_scalar<uint32_t> seed,
                                      cudf::size_type width,
                                      rmm::cuda_stream_view stream,
                                      rmm::mr::device_memory_resource* mr)
{
  using HashFunction = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>;
  auto const seeds   = cudf::device_span<uint32_t const>{seed.data(), 1};
  auto hashes        = detail::minhash_fn<HashFunction>(input, seeds, width, stream, mr);
  hashes->set_null_mask(cudf::detail::copy_bitmask(input.parent(), stream, mr), input.null_count());
  return hashes;
}

std::unique_ptr<cudf::column> minhash(cudf::strings_column_view const& input,
                                      cudf::device_span<uint32_t const> seeds,
                                      cudf::size_type width,
                                      rmm::cuda_stream_view stream,
                                      rmm::mr::device_memory_resource* mr)
{
  using HashFunction = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>;
  auto hashes        = detail::minhash_fn<HashFunction>(input, seeds, width, stream, mr);
  return build_list_result(input, std::move(hashes), seeds.size(), stream, mr);
}

std::unique_ptr<cudf::column> minhash64(cudf::strings_column_view const& input,
                                        cudf::numeric_scalar<uint64_t> seed,
                                        cudf::size_type width,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  using HashFunction = cudf::hashing::detail::MurmurHash3_x64_128<cudf::string_view>;
  auto const seeds   = cudf::device_span<uint64_t const>{seed.data(), 1};
  auto hashes        = detail::minhash_fn<HashFunction>(input, seeds, width, stream, mr);
  hashes->set_null_mask(cudf::detail::copy_bitmask(input.parent(), stream, mr), input.null_count());
  return hashes;
}

std::unique_ptr<cudf::column> minhash64(cudf::strings_column_view const& input,
                                        cudf::device_span<uint64_t const> seeds,
                                        cudf::size_type width,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  using HashFunction = cudf::hashing::detail::MurmurHash3_x64_128<cudf::string_view>;
  auto hashes        = detail::minhash_fn<HashFunction>(input, seeds, width, stream, mr);
  return build_list_result(input, std::move(hashes), seeds.size(), stream, mr);
}

}  // namespace detail

std::unique_ptr<cudf::column> minhash(cudf::strings_column_view const& input,
                                      cudf::numeric_scalar<uint32_t> seed,
                                      cudf::size_type width,
                                      rmm::cuda_stream_view stream,
                                      rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::minhash(input, seed, width, stream, mr);
}

std::unique_ptr<cudf::column> minhash(cudf::strings_column_view const& input,
                                      cudf::device_span<uint32_t const> seeds,
                                      cudf::size_type width,
                                      rmm::cuda_stream_view stream,
                                      rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::minhash(input, seeds, width, stream, mr);
}

std::unique_ptr<cudf::column> minhash64(cudf::strings_column_view const& input,
                                        cudf::numeric_scalar<uint64_t> seed,
                                        cudf::size_type width,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::minhash64(input, seed, width, stream, mr);
}

std::unique_ptr<cudf::column> minhash64(cudf::strings_column_view const& input,
                                        cudf::device_span<uint64_t const> seeds,
                                        cudf::size_type width,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::minhash64(input, seeds, width, stream, mr);
}

}  // namespace nvtext
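A minimal host-side sketch of calling the multi-seed nvtext::minhash overload above. It is not part of minhash.cu: the seed values and width are arbitrary assumptions, and it relies on rmm::device_uvector converting to cudf::device_span, which is how cudf code typically passes device buffers to span parameters.

// Usage sketch (assumed seeds/width; output is one list row of 4 uint32 minhashes per input row)
#include <nvtext/minhash.hpp>
#include <rmm/device_uvector.hpp>
#include <rmm/exec_policy.hpp>
#include <thrust/sequence.h>

std::unique_ptr<cudf::column> minhash_example(cudf::strings_column_view const& input,
                                              rmm::cuda_stream_view stream,
                                              rmm::mr::device_memory_resource* mr)
{
  // four arbitrary seeds (1,2,3,4); each seed produces one minhash per row
  auto seeds = rmm::device_uvector<uint32_t>(4, stream);
  thrust::sequence(rmm::exec_policy(stream), seeds.begin(), seeds.end(), 1u);
  // width=5: hash every 5-character substring of each row and keep the minimum per seed
  return nvtext::minhash(input, seeds, 5, stream, mr);
}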
rapidsai_public_repos/cudf/cpp/src/text/detokenize.cu
/*
 * Copyright (c) 2020-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <nvtext/tokenize.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/detail/indexalator.cuh>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/detail/sorting.hpp>
#include <cudf/strings/detail/strings_children.cuh>
#include <cudf/strings/detail/utilities.cuh>
#include <cudf/strings/string_view.cuh>
#include <cudf/strings/strings_column_view.hpp>
#include <cudf/table/table_view.hpp>
#include <cudf/utilities/default_stream.hpp>
#include <cudf/utilities/error.hpp>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/device_uvector.hpp>

#include <thrust/copy.h>
#include <thrust/count.h>
#include <thrust/iterator/counting_iterator.h>

namespace nvtext {
namespace detail {
namespace {

/**
 * @brief Generate strings from tokens.
 *
 * Each string is created by appending all the tokens assigned to
 * the same row. The `d_separator` is appended between each token.
 */
struct detokenizer_fn {
  cudf::column_device_view const d_strings;  // these are the tokens
  cudf::size_type const* d_row_map;          // indices sorted by output row
  cudf::size_type const* d_token_offsets;    // to each input token array
  cudf::string_view const d_separator;       // append after each token
  cudf::size_type* d_offsets{};              // offsets to output buffer d_chars
  char* d_chars{};                           // output buffer for characters

  __device__ void operator()(cudf::size_type idx)
  {
    auto const offset      = d_token_offsets[idx];
    auto d_tokens          = d_row_map + offset;
    auto const token_count = d_token_offsets[idx + 1] - offset;
    auto out_ptr           = d_chars ? d_chars + d_offsets[idx] : nullptr;
    cudf::size_type nbytes = 0;
    for (cudf::size_type jdx = 0; jdx < token_count; ++jdx) {
      auto const str_index = d_tokens[jdx];
      if (d_strings.is_null(str_index)) continue;
      auto const d_str = d_strings.element<cudf::string_view>(str_index);
      if (out_ptr) {
        out_ptr = cudf::strings::detail::copy_string(out_ptr, d_str);
        if (jdx + 1 < token_count)
          out_ptr = cudf::strings::detail::copy_string(out_ptr, d_separator);
      } else {
        nbytes += d_str.size_bytes();
        nbytes += d_separator.size_bytes();
      }
    }
    if (!d_chars) { d_offsets[idx] = (nbytes > 0) ? (nbytes - d_separator.size_bytes()) : 0; }
  }
};

struct index_changed_fn {
  cudf::detail::input_indexalator const d_rows;
  cudf::size_type const* d_row_map;
  __device__ bool operator()(cudf::size_type idx) const
  {
    return (idx == 0) || (d_rows[d_row_map[idx]] != d_rows[d_row_map[idx - 1]]);
  }
};

/**
 * @brief Convert the row indices into token offsets
 *
 * @param row_indices Indices where each token should land
 * @param sorted_indices Map of row_indices sorted
 * @param tokens_counts Total number of tokens
 * @param stream CUDA stream used for kernel launches
 */
rmm::device_uvector<cudf::size_type> create_token_row_offsets(
  cudf::column_view const& row_indices,
  cudf::column_view const& sorted_indices,
  cudf::size_type tokens_counts,
  rmm::cuda_stream_view stream)
{
  index_changed_fn fn{cudf::detail::indexalator_factory::make_input_iterator(row_indices),
                      sorted_indices.data<cudf::size_type>()};

  auto const output_count =
    thrust::count_if(rmm::exec_policy(stream),
                     thrust::make_counting_iterator<cudf::size_type>(0),
                     thrust::make_counting_iterator<cudf::size_type>(tokens_counts),
                     fn);
  auto tokens_offsets = rmm::device_uvector<cudf::size_type>(output_count + 1, stream);
  thrust::copy_if(rmm::exec_policy(stream),
                  thrust::make_counting_iterator<cudf::size_type>(0),
                  thrust::make_counting_iterator<cudf::size_type>(tokens_counts),
                  tokens_offsets.begin(),
                  fn);
  // set the last element to the total number of tokens
  tokens_offsets.set_element(output_count, tokens_counts, stream);
  return tokens_offsets;
}

}  // namespace

/**
 * @copydoc nvtext::detokenize
 */
std::unique_ptr<cudf::column> detokenize(cudf::strings_column_view const& strings,
                                         cudf::column_view const& row_indices,
                                         cudf::string_scalar const& separator,
                                         rmm::cuda_stream_view stream,
                                         rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(separator.is_valid(stream), "Parameter separator must be valid");
  CUDF_EXPECTS(row_indices.size() == strings.size(),
               "Parameter row_indices must be the same size as the input column");
  CUDF_EXPECTS(not row_indices.has_nulls(), "Parameter row_indices must not have nulls");

  auto tokens_counts = strings.size();
  if (tokens_counts == 0)  // if no input strings, return an empty column
    return cudf::make_empty_column(cudf::data_type{cudf::type_id::STRING});

  auto strings_column = cudf::column_device_view::create(strings.parent(), stream);

  // the indices may not be in order so we need to build a sorted map
  auto sorted_rows = cudf::detail::stable_sorted_order(
    cudf::table_view({row_indices}), {}, {}, stream, rmm::mr::get_current_device_resource());
  auto const d_row_map = sorted_rows->view().data<cudf::size_type>();

  // create offsets for the tokens for each output string
  auto tokens_offsets =
    create_token_row_offsets(row_indices, sorted_rows->view(), tokens_counts, stream);
  auto const output_count = tokens_offsets.size() - 1;  // number of output strings

  cudf::string_view const d_separator(separator.data(), separator.size());

  auto children = cudf::strings::detail::make_strings_children(
    detokenizer_fn{*strings_column, d_row_map, tokens_offsets.data(), d_separator},
    output_count,
    stream,
    mr);

  // make the output strings column from the offsets and chars column
  return cudf::make_strings_column(
    output_count, std::move(children.first), std::move(children.second), 0, rmm::device_buffer{});
}

}  // namespace detail

std::unique_ptr<cudf::column> detokenize(cudf::strings_column_view const& input,
                                         cudf::column_view const& row_indices,
                                         cudf::string_scalar const& separator,
                                         rmm::cuda_stream_view stream,
                                         rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::detokenize(input, row_indices, separator, stream, mr);
}

}  // namespace nvtext
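A minimal host-side sketch of calling nvtext::detokenize above. It is not part of detokenize.cu: the cudf::test column wrappers and the token/row-index values are illustrative assumptions; the expected output follows directly from the row-grouping logic in this file.

// Usage sketch (assumed helpers: cudf::test column wrappers)
#include <cudf_test/column_wrapper.hpp>
#include <nvtext/tokenize.hpp>

void detokenize_example(rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr)
{
  // one token per row; row_indices says which output row each token belongs to
  cudf::test::strings_column_wrapper tokens({"hello", "world", "one", "two", "three"});
  cudf::test::fixed_width_column_wrapper<cudf::size_type> rows({0, 0, 1, 1, 1});

  // expected output: ["hello world", "one two three"]
  auto result = nvtext::detokenize(
    cudf::strings_column_view(tokens), rows, cudf::string_scalar(" "), stream, mr);
}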
rapidsai_public_repos/cudf/cpp/src/text/stemmer.cu
/*
 * Copyright (c) 2020-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <nvtext/stemmer.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/column/column_view.hpp>
#include <cudf/detail/null_mask.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/strings/detail/utilities.cuh>
#include <cudf/strings/detail/utilities.hpp>
#include <cudf/strings/string_view.cuh>
#include <cudf/strings/strings_column_view.hpp>
#include <cudf/utilities/default_stream.hpp>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/exec_policy.hpp>

#include <thrust/for_each.h>
#include <thrust/iterator/constant_iterator.h>
#include <thrust/iterator/counting_iterator.h>
#include <thrust/transform.h>

namespace nvtext {
namespace detail {
namespace {

/**
 * @brief Return true if character at current iterator position
 * is a consonant.
 *
 * A consonant is a letter other than a, e, i, o or u, and other
 * than y preceded by a consonant.
 *
 * For `toy` the consonants are `t` and `y`, and in `syzygy` they
 * are `s`, `z` and `g`.
 *
 * A _vowel_ is defined as _not a consonant_.
 *
 * @param string_iterator Iterator positioned to the character to check.
 * @return True if the character at the iterator is a consonant.
 */
__device__ bool is_consonant(cudf::string_view::const_iterator string_iterator)
{
  auto ch = *string_iterator;
  cudf::string_view const d_vowels("aeiou", 5);
  if (d_vowels.find(ch) != cudf::string_view::npos) return false;
  if ((ch != 'y') || (string_iterator.position() == 0)) return true;
  // for 'y' case, check previous character is a consonant
  --string_iterator;
  return d_vowels.find(*string_iterator) != cudf::string_view::npos;
}

/**
 * @brief Functor called by detail::is_letter() to return true/false
 * indicating whether the specified character is a consonant or a vowel.
 */
template <typename PositionIterator>
struct is_letter_fn {
  cudf::column_device_view const d_strings;
  letter_type ltype;
  PositionIterator position_itr;

  __device__ bool operator()(cudf::size_type idx)
  {
    if (d_strings.is_null(idx)) return false;
    auto const d_str = d_strings.element<cudf::string_view>(idx);
    if (d_str.empty()) return false;

    auto const position = position_itr[idx];
    auto const length   = d_str.length();
    if ((position >= length) || (position < -length)) return false;

    return is_consonant(d_str.begin() + ((position + length) % length))
             ? ltype == letter_type::CONSONANT
             : ltype == letter_type::VOWEL;
  }
};

}  // namespace

// details API

template <typename PositionIterator>
std::unique_ptr<cudf::column> is_letter(cudf::strings_column_view const& strings,
                                        letter_type ltype,
                                        PositionIterator position_itr,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  if (strings.is_empty()) return cudf::make_empty_column(cudf::data_type{cudf::type_id::BOOL8});

  // create empty output column
  auto results =
    cudf::make_fixed_width_column(cudf::data_type{cudf::type_id::BOOL8},
                                  strings.size(),
                                  cudf::detail::copy_bitmask(strings.parent(), stream, mr),
                                  strings.null_count(),
                                  stream,
                                  mr);
  // set values into output column
  auto strings_column = cudf::column_device_view::create(strings.parent(), stream);
  thrust::transform(rmm::exec_policy(stream),
                    thrust::make_counting_iterator<cudf::size_type>(0),
                    thrust::make_counting_iterator<cudf::size_type>(strings.size()),
                    results->mutable_view().data<bool>(),
                    is_letter_fn<PositionIterator>{*strings_column, ltype, position_itr});
  results->set_null_count(strings.null_count());
  return results;
}

namespace {

/**
 * @brief For dispatching index-type of indices parameter in the nvtext::is_letter API.
 */
struct dispatch_is_letter_fn {
  template <typename T, std::enable_if_t<cudf::is_index_type<T>()>* = nullptr>
  std::unique_ptr<cudf::column> operator()(cudf::strings_column_view const& strings,
                                           letter_type ltype,
                                           cudf::column_view const& indices,
                                           rmm::cuda_stream_view stream,
                                           rmm::mr::device_memory_resource* mr) const
  {
    CUDF_EXPECTS(strings.size() == indices.size(),
                 "strings column and indices column must be the same size");
    CUDF_EXPECTS(!indices.has_nulls(), "indices column must not contain nulls");
    // resolve and pass an iterator for the indices column to the detail function
    return is_letter(strings, ltype, indices.begin<T>(), stream, mr);
  }

  template <typename T, typename... Args, std::enable_if_t<not cudf::is_index_type<T>()>* = nullptr>
  std::unique_ptr<cudf::column> operator()(Args&&...) const
  {
    CUDF_FAIL("The is_letter indices parameter must be an integer type.");
  }
};

/**
 * @brief Returns the measure for each string.
 *
 * Text description here is from https://tartarus.org/martin/PorterStemmer/def.txt
 *
 * A consonant will be denoted by `c`, a vowel by `v`. A list `ccc...` of length
 * greater than 0 will be denoted by `C`, and a list `vvv...` of length greater
 * than 0 will be denoted by `V`. Any word, or part of a word, therefore has one
 * of the four forms:
 *
 * @code{.pseudo}
 * CVCV ... C
 * CVCV ... V
 * VCVC ... C
 * VCVC ... V
 * @endcode
 *
 * These may all be represented by the single form `[C]VCVC ... [V]`
 * where the square brackets denote arbitrary presence of their contents.
 * Using `(VC){m}` to denote `VC` repeated `m` times, this may again be written as
 * `[C](VC){m}[V]`.
 *
 * And `m` will be called the _measure_ of any word or word part when represented in
 * this form. The case `m = 0` covers the null or empty string.
 *
 * Examples:
 * @code{.pseudo}
 * m=0: TR, EE, TREE, Y, BY.
 * m=1: TROUBLE, OATS, TREES, IVY.
 * m=2: TROUBLES, PRIVATE, OATEN, ORRERY.
 * @endcode
 */
struct porter_stemmer_measure_fn {
  cudf::column_device_view const d_strings;  // strings to measure

  __device__ cudf::size_type operator()(cudf::size_type idx) const
  {
    if (d_strings.is_null(idx)) { return 0; }
    cudf::string_view d_str = d_strings.element<cudf::string_view>(idx);
    if (d_str.empty()) { return 0; }

    cudf::size_type measure = 0;

    auto itr       = d_str.begin();
    bool vowel_run = !is_consonant(itr);
    while (itr != d_str.end()) {
      if (is_consonant(itr)) {
        if (vowel_run) { measure++; }
        vowel_run = false;
      } else {
        vowel_run = true;
      }
      ++itr;
    }
    return measure;
  }
};

}  // namespace

std::unique_ptr<cudf::column> porter_stemmer_measure(cudf::strings_column_view const& strings,
                                                     rmm::cuda_stream_view stream,
                                                     rmm::mr::device_memory_resource* mr)
{
  if (strings.is_empty()) {
    return cudf::make_empty_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()});
  }

  // create empty output column
  auto results =
    cudf::make_fixed_width_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()},
                                  strings.size(),
                                  cudf::detail::copy_bitmask(strings.parent(), stream, mr),
                                  strings.null_count(),
                                  stream,
                                  mr);
  // compute measures into output column
  auto strings_column = cudf::column_device_view::create(strings.parent(), stream);
  thrust::transform(rmm::exec_policy(stream),
                    thrust::make_counting_iterator<cudf::size_type>(0),
                    thrust::make_counting_iterator<cudf::size_type>(strings.size()),
                    results->mutable_view().data<cudf::size_type>(),
                    porter_stemmer_measure_fn{*strings_column});
  results->set_null_count(strings.null_count());
  return results;
}

std::unique_ptr<cudf::column> is_letter(cudf::strings_column_view const& strings,
                                        letter_type ltype,
                                        cudf::column_view const& indices,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  return cudf::type_dispatcher(
    indices.type(), dispatch_is_letter_fn{}, strings, ltype, indices, stream, mr);
}

}  // namespace detail

// external APIs

std::unique_ptr<cudf::column> is_letter(cudf::strings_column_view const& input,
                                        letter_type ltype,
                                        cudf::size_type character_index,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::is_letter(
    input, ltype, thrust::make_constant_iterator<cudf::size_type>(character_index), stream, mr);
}

std::unique_ptr<cudf::column> is_letter(cudf::strings_column_view const& input,
                                        letter_type ltype,
                                        cudf::column_view const& indices,
                                        rmm::cuda_stream_view stream,
                                        rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::is_letter(input, ltype, indices, stream, mr);
}

/**
 * @copydoc nvtext::porter_stemmer_measure
 */
std::unique_ptr<cudf::column> porter_stemmer_measure(cudf::strings_column_view const& input,
                                                     rmm::cuda_stream_view stream,
                                                     rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::porter_stemmer_measure(input, stream, mr);
}

}  // namespace nvtext
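A minimal host-side sketch of calling the stemmer APIs above. It is not part of stemmer.cu: the cudf::test::strings_column_wrapper helper and the example words are illustrative assumptions; the expected measures come from the m=0/1/2 examples in the doc comment in this file.

// Usage sketch (assumed helper: cudf::test::strings_column_wrapper)
#include <cudf_test/column_wrapper.hpp>
#include <nvtext/stemmer.hpp>

void stemmer_example(rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr)
{
  cudf::test::strings_column_wrapper words({"tree", "trouble", "private"});
  auto const sv = cudf::strings_column_view(words);

  // Porter measures per the table above: tree=0, trouble=1, private=2
  auto measures = nvtext::porter_stemmer_measure(sv, stream, mr);

  // check whether the character at index 2 of each word is a vowel
  auto vowels = nvtext::is_letter(sv, nvtext::letter_type::VOWEL, 2, stream, mr);
}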
rapidsai_public_repos/cudf/cpp/src/text/vocabulary_tokenize.cu
/*
 * Copyright (c) 2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <text/utilities/tokenize_ops.cuh>

#include <nvtext/tokenize.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/detail/get_value.cuh>
#include <cudf/detail/iterator.cuh>
#include <cudf/detail/null_mask.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/detail/sizes_to_offsets_iterator.cuh>
#include <cudf/detail/utilities/cuda.cuh>
#include <cudf/hashing/detail/hash_allocator.cuh>
#include <cudf/hashing/detail/murmurhash3_x86_32.cuh>
#include <cudf/strings/string_view.cuh>
#include <cudf/strings/strings_column_view.hpp>
#include <cudf/utilities/default_stream.hpp>
#include <cudf/utilities/error.hpp>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/mr/device/polymorphic_allocator.hpp>

#include <cuco/static_map.cuh>

#include <thrust/copy.h>
#include <thrust/distance.h>
#include <thrust/execution_policy.h>
#include <thrust/functional.h>
#include <thrust/logical.h>
#include <thrust/transform.h>

#include <cub/cub.cuh>

namespace nvtext {
namespace detail {
namespace {

using string_hasher_type = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>;
using hash_value_type    = string_hasher_type::result_type;

/**
 * @brief Hasher function used for building and using the cuco static-map
 *
 * This takes advantage of heterogeneous lookup feature in cuco static-map which
 * allows inserting with one type (index) and looking up with a different type (string).
 */
struct vocab_hasher {
  cudf::column_device_view const d_strings;
  string_hasher_type hasher{};
  // used by insert
  __device__ hash_value_type operator()(cudf::size_type index) const
  {
    return hasher(d_strings.element<cudf::string_view>(index));
  }
  // used by find
  __device__ hash_value_type operator()(cudf::string_view const& s) const { return hasher(s); }
};

/**
 * @brief Equal function used for building and using the cuco static-map
 *
 * This takes advantage of heterogeneous lookup feature in cuco static-map which
 * allows inserting with one type (index) and looking up with a different type (string).
 */
struct vocab_equal {
  cudf::column_device_view const d_strings;
  // used by insert
  __device__ bool operator()(cudf::size_type lhs, cudf::size_type rhs) const noexcept
  {
    return lhs == rhs;  // all rows are expected to be unique
  }
  // used by find
  __device__ bool operator()(cudf::size_type lhs, cudf::string_view const& rhs) const noexcept
  {
    return d_strings.element<cudf::string_view>(lhs) == rhs;
  }
};

using hash_table_allocator_type = rmm::mr::stream_allocator_adaptor<default_allocator<char>>;
using probe_scheme              = cuco::experimental::linear_probing<1, vocab_hasher>;
using vocabulary_map_type       = cuco::experimental::static_map<cudf::size_type,
                                                                 cudf::size_type,
                                                                 cuco::experimental::extent<std::size_t>,
                                                                 cuda::thread_scope_device,
                                                                 vocab_equal,
                                                                 probe_scheme,
                                                                 hash_table_allocator_type>;

}  // namespace
}  // namespace detail

// column_device_view::create returns a little more than a std::unique_ptr<column_device_view>,
// so this helper simplifies the return type in a maintainable way
using col_device_view = std::invoke_result_t<decltype(&cudf::column_device_view::create),
                                             cudf::column_view,
                                             rmm::cuda_stream_view>;

struct tokenize_vocabulary::tokenize_vocabulary_impl {
  std::unique_ptr<cudf::column> const vocabulary;
  col_device_view const d_vocabulary;
  std::unique_ptr<detail::vocabulary_map_type> vocabulary_map;

  auto get_map_ref() const { return vocabulary_map->ref(cuco::experimental::op::find); }

  tokenize_vocabulary_impl(std::unique_ptr<cudf::column>&& vocab,
                           col_device_view&& d_vocab,
                           std::unique_ptr<detail::vocabulary_map_type>&& map)
    : vocabulary(std::move(vocab)), d_vocabulary(std::move(d_vocab)), vocabulary_map(std::move(map))
  {
  }
};

struct key_pair {
  __device__ auto operator()(cudf::size_type idx) const noexcept
  {
    return cuco::make_pair(idx, idx);
  }
};

tokenize_vocabulary::tokenize_vocabulary(cudf::strings_column_view const& input,
                                         rmm::cuda_stream_view stream,
                                         rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(not input.is_empty(), "vocabulary must not be empty");
  CUDF_EXPECTS(not input.has_nulls(), "vocabulary must not have nulls");

  // need to hold a copy of the input
  auto vocabulary   = std::make_unique<cudf::column>(input.parent(), stream, mr);
  auto d_vocabulary = cudf::column_device_view::create(vocabulary->view(), stream);

  auto vocab_map = std::make_unique<detail::vocabulary_map_type>(
    static_cast<size_t>(vocabulary->size() * 2),
    cuco::empty_key{-1},
    cuco::empty_value{-1},
    detail::vocab_equal{*d_vocabulary},
    detail::probe_scheme{detail::vocab_hasher{*d_vocabulary}},
    detail::hash_table_allocator_type{default_allocator<char>{}, stream},
    stream.value());

  // the row index is the token id (value for each key in the map)
  auto iter = cudf::detail::make_counting_transform_iterator(0, key_pair{});
  vocab_map->insert_async(iter, iter + vocabulary->size(), stream.value());

  _impl = new tokenize_vocabulary_impl(
    std::move(vocabulary), std::move(d_vocabulary), std::move(vocab_map));
}

tokenize_vocabulary::~tokenize_vocabulary() { delete _impl; }

std::unique_ptr<tokenize_vocabulary> load_vocabulary(cudf::strings_column_view const& input,
                                                     rmm::cuda_stream_view stream,
                                                     rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return std::make_unique<tokenize_vocabulary>(input, stream, mr);
}

namespace detail {
namespace {

/**
 * @brief Threshold to decide on using string or warp parallel functions.
 *
 * If the average byte length of a string in a column exceeds this value then
 * the warp-parallel function is used to compute the output sizes.
 * Otherwise, a regular string-parallel function is used.
 *
 * This value was found using the vocab_tokenize benchmark results.
 */
constexpr cudf::size_type AVG_CHAR_BYTES_THRESHOLD = 128;

constexpr int block_size = 256;

__device__ bool is_delimiter(cudf::string_view const& d_delimiters, cudf::char_utf8 chr)
{
  return d_delimiters.empty()
           ? (chr <= ' ')  // whitespace check
           : thrust::any_of(thrust::seq,
                            d_delimiters.begin(),
                            d_delimiters.end(),
                            [chr] __device__(cudf::char_utf8 c) { return c == chr; });
}

struct mark_delimiters_fn {
  char const* d_chars;
  cudf::string_view const d_delimiter;
  int8_t* d_results;

  __device__ void operator()(cudf::size_type idx) const
  {
    auto const ptr = d_chars + idx;
    if (cudf::strings::detail::is_utf8_continuation_char(*ptr)) { return; }
    cudf::char_utf8 chr = 0;
    auto ch_size        = cudf::strings::detail::to_char_utf8(ptr, chr);
    auto const output   = is_delimiter(d_delimiter, chr);
    while (ch_size > 0) {
      d_results[idx++] = output;
      --ch_size;
    }
  }
};

__global__ void token_counts_fn(cudf::column_device_view const d_strings,
                                cudf::string_view const d_delimiter,
                                cudf::size_type* d_counts,
                                int8_t* d_results)
{
  // string per warp
  auto const idx = static_cast<std::size_t>(threadIdx.x + blockIdx.x * blockDim.x);
  if (idx >= (static_cast<std::size_t>(d_strings.size()) *
              static_cast<std::size_t>(cudf::detail::warp_size))) {
    return;
  }
  auto const str_idx  = static_cast<cudf::size_type>(idx / cudf::detail::warp_size);
  auto const lane_idx = static_cast<cudf::size_type>(idx % cudf::detail::warp_size);

  if (d_strings.is_null(str_idx)) {
    d_counts[str_idx] = 0;
    return;
  }
  auto const d_str = d_strings.element<cudf::string_view>(str_idx);
  if (d_str.empty()) {
    d_counts[str_idx] = 0;
    return;
  }

  auto const offsets =
    d_strings.child(cudf::strings_column_view::offsets_column_index).data<cudf::size_type>();
  auto const offset = offsets[str_idx + d_strings.offset()] - offsets[d_strings.offset()];
  auto const chars_begin =
    d_strings.child(cudf::strings_column_view::chars_column_index).data<char>() +
    offsets[d_strings.offset()];

  auto const begin        = d_str.data();
  auto const end          = begin + d_str.size_bytes();
  auto const d_output     = d_results + offset;
  auto const d_output_end = d_output + d_str.size_bytes();

  using warp_reduce = cub::WarpReduce<cudf::size_type>;
  __shared__ typename warp_reduce::TempStorage warp_storage;

  cudf::size_type count = 0;
  if (lane_idx == 0) {
    cudf::char_utf8 chr = 0;
    auto ch_size        = cudf::strings::detail::to_char_utf8(begin, chr);
    auto output         = 1;
    if (begin > chars_begin) {
      auto ptr = begin - 1;
      while (ptr > chars_begin && cudf::strings::detail::is_utf8_continuation_char(*ptr)) {
        --ptr;
      }
      cudf::strings::detail::to_char_utf8(ptr, chr);
      output = !is_delimiter(d_delimiter, chr);
    }
    auto ptr = d_output;
    while (ch_size > 0) {
      *ptr++ = output;
      --ch_size;
    }
    count = ((begin + ch_size) == end);
  }
  __syncwarp();

  for (auto itr = d_output + lane_idx + 1; itr < d_output_end; itr += cudf::detail::warp_size) {
    // add one if at the edge of a token or if at the string's end
    if (*itr) {
      count += !(*(itr - 1));
    } else {
      count += (itr + 1 == d_output_end);
    }
  }
  __syncwarp();

  // add up the counts from the other threads to compute the total token count for this string
  auto const total_count = warp_reduce(warp_storage).Reduce(count, cub::Sum());
  if (lane_idx == 0) { d_counts[str_idx] = total_count; }
}

/**
 * @brief Tokenizes each string and uses the map to assign token id values
 *
 * @tparam MapRefType Type of the static_map reference for calling find()
 */
template <typename MapRefType>
struct vocabulary_tokenizer_fn {
  cudf::column_device_view const d_strings;
  cudf::string_view const d_delimiter;
  MapRefType d_map;
  cudf::size_type const default_id;
  cudf::size_type const* d_offsets;
  cudf::size_type* d_results;

  __device__ void operator()(cudf::size_type idx) const
  {
    if (d_strings.is_null(idx)) { return; }

    auto const d_str = d_strings.element<cudf::string_view>(idx);
    characters_tokenizer tokenizer(d_str, d_delimiter);
    auto d_tokens = d_results + d_offsets[idx];

    cudf::size_type token_idx = 0;
    while (tokenizer.next_token()) {
      auto const pos   = tokenizer.token_byte_positions();
      auto const token = cudf::string_view{d_str.data() + pos.first, (pos.second - pos.first)};
      // lookup token in map
      auto const itr = d_map.find(token);
      auto const id  = (itr != d_map.end()) ? itr->second : default_id;
      // set value into the output
      d_tokens[token_idx++] = id;
    }
  }
};

template <typename MapRefType>
struct transform_tokenizer_fn {
  cudf::string_view const d_delimiter;
  MapRefType d_map;
  cudf::size_type const default_id;

  __device__ cudf::size_type operator()(cudf::string_view d_str) const
  {
    auto const begin = d_str.data();
    auto const end   = begin + d_str.size_bytes();

    auto itr = begin;
    while (itr < end) {
      cudf::char_utf8 chr = 0;
      auto const ch_size  = cudf::strings::detail::to_char_utf8(itr, chr);
      if (!is_delimiter(d_delimiter, chr)) break;
      itr += ch_size;
    }

    auto const size  = static_cast<cudf::size_type>(thrust::distance(itr, end));
    auto const token = cudf::string_view{itr, size};
    // lookup token in map
    auto const fitr = d_map.find(token);
    return (fitr != d_map.end()) ? fitr->second : default_id;
  }
};

}  // namespace

std::unique_ptr<cudf::column> tokenize_with_vocabulary(cudf::strings_column_view const& input,
                                                       tokenize_vocabulary const& vocabulary,
                                                       cudf::string_scalar const& delimiter,
                                                       cudf::size_type default_id,
                                                       rmm::cuda_stream_view stream,
                                                       rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(delimiter.is_valid(stream), "Parameter delimiter must be valid");

  auto const output_type = cudf::data_type{cudf::type_to_id<cudf::size_type>()};
  if (input.size() == input.null_count()) { return cudf::make_empty_column(output_type); }

  // count the tokens per string and build the offsets from the counts
  auto const d_strings   = cudf::column_device_view::create(input.parent(), stream);
  auto const d_delimiter = delimiter.value(stream);
  auto map_ref           = vocabulary._impl->get_map_ref();
  auto const zero_itr    = thrust::make_counting_iterator<cudf::size_type>(0);

  if ((input.chars_size() / (input.size() - input.null_count())) < AVG_CHAR_BYTES_THRESHOLD) {
    auto const sizes_itr =
      cudf::detail::make_counting_transform_iterator(0, strings_tokenizer{*d_strings, d_delimiter});
    auto [token_offsets, total_count] =
      cudf::detail::make_offsets_child_column(sizes_itr, sizes_itr + input.size(), stream, mr);

    // build the output column to hold all the token ids
    auto tokens =
      cudf::make_numeric_column(output_type, total_count, cudf::mask_state::UNALLOCATED, stream, mr);
    auto d_tokens  = tokens->mutable_view().data<cudf::size_type>();
    auto d_offsets = token_offsets->view().data<cudf::size_type>();
    vocabulary_tokenizer_fn<decltype(map_ref)> tokenizer{
      *d_strings, d_delimiter, map_ref, default_id, d_offsets, d_tokens};
    thrust::for_each_n(rmm::exec_policy(stream), zero_itr, input.size(), tokenizer);

    return cudf::make_lists_column(input.size(),
                                   std::move(token_offsets),
                                   std::move(tokens),
                                   input.null_count(),
                                   cudf::detail::copy_bitmask(input.parent(), stream, mr),
                                   stream,
                                   mr);
  }

  // longer strings perform better with warp-parallel approach
  auto const first_offset = (input.offset() == 0)
                              ? 0
                              : cudf::detail::get_value<cudf::size_type>(
                                  input.offsets(), input.offset(), stream);
  auto const last_offset = (input.offset() == 0 && input.size() == input.offsets().size() - 1)
                             ? input.chars().size()
                             : cudf::detail::get_value<cudf::size_type>(
                                 input.offsets(), input.size() + input.offset(), stream);
  auto const chars_size    = last_offset - first_offset;
  auto const d_input_chars = input.chars().data<char>() + first_offset;

  rmm::device_uvector<cudf::size_type> d_token_counts(input.size(), stream);
  rmm::device_uvector<int8_t> d_marks(chars_size, stream);

  // mark position of all delimiters
  thrust::for_each_n(rmm::exec_policy(stream),
                     zero_itr,
                     chars_size,
                     mark_delimiters_fn{d_input_chars, d_delimiter, d_marks.data()});

  // launch warp per string to compute token counts
  cudf::detail::grid_1d grid{input.size() * cudf::detail::warp_size, block_size};
  token_counts_fn<<<grid.num_blocks, grid.num_threads_per_block, 0, stream.value()>>>(
    *d_strings, d_delimiter, d_token_counts.data(), d_marks.data());
  auto [token_offsets, total_count] =
    cudf::detail::make_offsets_child_column(d_token_counts.begin(), d_token_counts.end(), stream, mr);

  rmm::device_uvector<cudf::size_type> d_tmp_offsets(total_count + 1, stream);
  d_tmp_offsets.set_element(total_count, chars_size, stream);
  thrust::copy_if(rmm::exec_policy(stream),
                  zero_itr,
                  thrust::counting_iterator<cudf::size_type>(chars_size),
                  d_tmp_offsets.begin(),
                  [d_marks = d_marks.data()] __device__(auto idx) {
                    if (idx == 0) return true;
                    return d_marks[idx] && !d_marks[idx - 1];
                  });

  auto tmp_offsets =
    std::make_unique<cudf::column>(std::move(d_tmp_offsets), rmm::device_buffer{}, 0);
  auto tmp_chars = cudf::column_view(input.chars().type(), chars_size, d_input_chars, nullptr, 0);
  auto const tmp_input = cudf::column_view(
    input.parent().type(), total_count, nullptr, nullptr, 0, 0, {tmp_offsets->view(), tmp_chars});

  auto const d_tmp_strings = cudf::column_device_view::create(tmp_input, stream);

  auto tokens =
    cudf::make_numeric_column(output_type, total_count, cudf::mask_state::UNALLOCATED, stream, mr);
  auto d_tokens = tokens->mutable_view().data<cudf::size_type>();

  transform_tokenizer_fn<decltype(map_ref)> tokenizer{d_delimiter, map_ref, default_id};
  thrust::transform(rmm::exec_policy(stream),
                    d_tmp_strings->begin<cudf::string_view>(),
                    d_tmp_strings->end<cudf::string_view>(),
                    d_tokens,
                    tokenizer);

  return cudf::make_lists_column(input.size(),
                                 std::move(token_offsets),
                                 std::move(tokens),
                                 input.null_count(),
                                 cudf::detail::copy_bitmask(input.parent(), stream, mr),
                                 stream,
                                 mr);
}

}  // namespace detail

std::unique_ptr<cudf::column> tokenize_with_vocabulary(cudf::strings_column_view const& input,
                                                       tokenize_vocabulary const& vocabulary,
                                                       cudf::string_scalar const& delimiter,
                                                       cudf::size_type default_id,
                                                       rmm::cuda_stream_view stream,
                                                       rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::tokenize_with_vocabulary(input, vocabulary, delimiter, default_id, stream, mr);
}

}  // namespace nvtext
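A minimal host-side sketch of the vocabulary-tokenize flow above. It is not part of vocabulary_tokenize.cu: the cudf::test::strings_column_wrapper helper and the vocabulary/input values are illustrative assumptions; the expected ids follow from the map built in this file, where each vocabulary row index is its token id.

// Usage sketch (assumed helper: cudf::test::strings_column_wrapper)
#include <cudf_test/column_wrapper.hpp>
#include <nvtext/tokenize.hpp>

void vocab_tokenize_example(rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr)
{
  // vocabulary rows must be unique and non-null; row index becomes the token id
  cudf::test::strings_column_wrapper vocab({"dog", "fox", "the"});
  auto vocabulary = nvtext::load_vocabulary(cudf::strings_column_view(vocab), stream, mr);

  cudf::test::strings_column_wrapper input({"the fox chased the dog"});
  // unknown tokens ("chased") map to default_id = -1
  auto ids = nvtext::tokenize_with_vocabulary(cudf::strings_column_view(input),
                                              *vocabulary,
                                              cudf::string_scalar(" "),
                                              -1,
                                              stream,
                                              mr);
  // expected single list row: [2, 1, -1, 2, 0]
}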
rapidsai_public_repos/cudf/cpp/src/text/jaccard.cu
/*
 * Copyright (c) 2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <nvtext/detail/generate_ngrams.hpp>
#include <nvtext/jaccard.hpp>

#include <cudf/column/column.hpp>
#include <cudf/column/column_device_view.cuh>
#include <cudf/column/column_factories.hpp>
#include <cudf/detail/null_mask.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/detail/utilities/cuda.cuh>
#include <cudf/lists/lists_column_view.hpp>
#include <cudf/utilities/default_stream.hpp>
#include <cudf/utilities/error.hpp>
#include <cudf/utilities/span.hpp>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/exec_policy.hpp>

#include <thrust/binary_search.h>
#include <thrust/execution_policy.h>
#include <thrust/iterator/counting_iterator.h>
#include <thrust/transform.h>

#include <cub/cub.cuh>

namespace nvtext {
namespace detail {
namespace {

/**
 * @brief Retrieve the row data (span) for the given column/row-index
 *
 * @param d_input Input lists column
 * @param idx Row index to retrieve
 * @return A device-span of the row values
 */
__device__ auto get_row(cudf::column_device_view const& d_input, cudf::size_type idx)
{
  auto const offsets =
    d_input.child(cudf::lists_column_view::offsets_column_index).data<cudf::size_type>();
  auto const offset = offsets[idx];
  auto const size   = offsets[idx + 1] - offset;
  auto const begin =
    d_input.child(cudf::lists_column_view::child_column_index).data<uint32_t>() + offset;
  return cudf::device_span<uint32_t const>(begin, size);
}

/**
 * @brief Count the unique values within each row of the input column
 *
 * This is called with a warp per row
 */
struct sorted_unique_fn {
  cudf::column_device_view const d_input;
  cudf::size_type* d_results;

  // warp per row
  __device__ void operator()(cudf::size_type idx) const
  {
    using warp_reduce = cub::WarpReduce<cudf::size_type>;
    __shared__ typename warp_reduce::TempStorage temp_storage;

    auto const row_idx  = idx / cudf::detail::warp_size;
    auto const lane_idx = idx % cudf::detail::warp_size;
    auto const row      = get_row(d_input, row_idx);
    auto const begin    = row.begin();

    cudf::size_type count = 0;
    for (auto itr = begin + lane_idx; itr < row.end(); itr += cudf::detail::warp_size) {
      count += (itr == begin || *itr != *(itr - 1));
    }
    auto const result = warp_reduce(temp_storage).Sum(count);
    if (lane_idx == 0) { d_results[row_idx] = result; }
  }
};

rmm::device_uvector<cudf::size_type> compute_unique_counts(cudf::column_view const& input,
                                                           rmm::cuda_stream_view stream)
{
  auto const d_input = cudf::column_device_view::create(input, stream);
  auto d_results     = rmm::device_uvector<cudf::size_type>(input.size(), stream);
  sorted_unique_fn fn{*d_input, d_results.data()};
  thrust::for_each_n(rmm::exec_policy(stream),
                     thrust::counting_iterator<cudf::size_type>(0),
                     input.size() * cudf::detail::warp_size,
                     fn);
  return d_results;
}

/**
 * @brief Count the number of common values within each row of the 2 input columns
 *
 * This is called with a warp per row
 */
struct sorted_intersect_fn {
  cudf::column_device_view const d_input1;
  cudf::column_device_view const d_input2;
  cudf::size_type* d_results;

  // warp per row
  __device__ void operator()(cudf::size_type idx) const
  {
    using warp_reduce = cub::WarpReduce<cudf::size_type>;
    __shared__ typename warp_reduce::TempStorage temp_storage;

    auto const row_idx  = idx / cudf::detail::warp_size;
    auto const lane_idx = idx % cudf::detail::warp_size;

    auto const needles  = get_row(d_input1, row_idx);
    auto const haystack = get_row(d_input2, row_idx);

    auto begin     = haystack.begin();
    auto const end = haystack.end();

    // TODO: investigate cuCollections device-side static-map to match row values
    cudf::size_type count = 0;
    for (auto itr = needles.begin() + lane_idx; itr < needles.end() && begin < end;
         itr += cudf::detail::warp_size) {
      if (itr != needles.begin() && *itr == *(itr - 1)) { continue; }  // skip duplicates
      // search haystack for this needle (*itr)
      auto const found = thrust::lower_bound(thrust::seq, begin, end, *itr);
      count += (found != end) && (*found == *itr);  // increment if found;
      begin = found;                                // shorten the next lower-bound range
    }
    // sum up the counts across this warp
    auto const result = warp_reduce(temp_storage).Sum(count);
    if (lane_idx == 0) { d_results[row_idx] = result; }
  }
};

rmm::device_uvector<cudf::size_type> compute_intersect_counts(cudf::column_view const& input1,
                                                              cudf::column_view const& input2,
                                                              rmm::cuda_stream_view stream)
{
  auto const d_input1 = cudf::column_device_view::create(input1, stream);
  auto const d_input2 = cudf::column_device_view::create(input2, stream);
  auto d_results      = rmm::device_uvector<cudf::size_type>(input1.size(), stream);
  sorted_intersect_fn fn{*d_input1, *d_input2, d_results.data()};
  thrust::for_each_n(rmm::exec_policy(stream),
                     thrust::counting_iterator<cudf::size_type>(0),
                     input1.size() * cudf::detail::warp_size,
                     fn);
  return d_results;
}

/**
 * @brief Compute the jaccard distance for each row
 *
 * Formula is J = |A ∩ B| / |A ∪ B|
 *             = |A ∩ B| / (|A| + |B| - |A ∩ B|)
 *
 * where |A ∩ B| is number of common values between A and B
 * and |x| is the number of unique values in x.
 */
struct jaccard_fn {
  cudf::size_type const* d_uniques1;
  cudf::size_type const* d_uniques2;
  cudf::size_type const* d_intersects;

  __device__ float operator()(cudf::size_type idx) const
  {
    auto const count1     = d_uniques1[idx];
    auto const count2     = d_uniques2[idx];
    auto const intersects = d_intersects[idx];
    // the intersect values are in both sets so a union count
    // would need to subtract the intersect count from one set
    // (see formula in comment above)
    auto const unions = count1 + count2 - intersects;
    return unions ? (static_cast<float>(intersects) / static_cast<float>(unions)) : 0.f;
  }
};

/**
 * @brief Create hashes for each substring
 *
 * Uses the hash_character_ngrams to hash substrings of the input column.
 * This returns a lists column where each row is the hashes for the substrings
 * of the corresponding input string row.
 *
 * The hashes are then sorted using a segmented-sort as setup to
 * perform the unique and intersect operations.
 */
std::unique_ptr<cudf::column> hash_substrings(cudf::strings_column_view const& col,
                                              cudf::size_type width,
                                              rmm::cuda_stream_view stream)
{
  auto hashes = hash_character_ngrams(col, width, stream, rmm::mr::get_current_device_resource());
  auto const input   = cudf::lists_column_view(hashes->view());
  auto const offsets = input.offsets_begin();
  auto const data    = input.child().data<uint32_t>();

  rmm::device_uvector<uint32_t> sorted(input.child().size(), stream);

  // this is wicked fast and much faster than using cudf::lists::detail::sort_list
  rmm::device_buffer d_temp_storage;
  size_t temp_storage_bytes = 0;
  cub::DeviceSegmentedSort::SortKeys(d_temp_storage.data(),
                                     temp_storage_bytes,
                                     data,
                                     sorted.data(),
                                     sorted.size(),
                                     input.size(),
                                     offsets,
                                     offsets + 1,
                                     stream.value());
  d_temp_storage = rmm::device_buffer{temp_storage_bytes, stream};
  cub::DeviceSegmentedSort::SortKeys(d_temp_storage.data(),
                                     temp_storage_bytes,
                                     data,
                                     sorted.data(),
                                     sorted.size(),
                                     input.size(),
                                     offsets,
                                     offsets + 1,
                                     stream.value());

  auto contents = hashes->release();
  // the offsets are taken from the hashes column since they are the same
  // before and after the segmented-sort
  return cudf::make_lists_column(
    col.size(),
    std::move(contents.children.front()),
    std::make_unique<cudf::column>(std::move(sorted), rmm::device_buffer{}, 0),
    0,
    rmm::device_buffer{},
    stream,
    rmm::mr::get_current_device_resource());
}

}  // namespace

std::unique_ptr<cudf::column> jaccard_index(cudf::strings_column_view const& input1,
                                            cudf::strings_column_view const& input2,
                                            cudf::size_type width,
                                            rmm::cuda_stream_view stream,
                                            rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(
    input1.size() == input2.size(), "input columns must be the same size", std::invalid_argument);
  CUDF_EXPECTS(width >= 2,
               "Parameter width should be an integer value of 2 or greater",
               std::invalid_argument);

  constexpr auto output_type = cudf::data_type{cudf::type_id::FLOAT32};
  if (input1.is_empty()) { return cudf::make_empty_column(output_type); }

  auto const [d_uniques1, d_uniques2, d_intersects] = [&] {
    // build hashes of the substrings
    auto const hash1 = hash_substrings(input1, width, stream);
    auto const hash2 = hash_substrings(input2, width, stream);

    // compute the unique counts in each set and the intersection counts
    auto d_uniques1   = compute_unique_counts(hash1->view(), stream);
    auto d_uniques2   = compute_unique_counts(hash2->view(), stream);
    auto d_intersects = compute_intersect_counts(hash1->view(), hash2->view(), stream);

    return std::tuple{std::move(d_uniques1), std::move(d_uniques2), std::move(d_intersects)};
  }();

  auto results = cudf::make_numeric_column(
    output_type, input1.size(), cudf::mask_state::UNALLOCATED, stream, mr);
  auto d_results = results->mutable_view().data<float>();

  // compute the jaccard using the unique counts and the intersect counts
  thrust::transform(rmm::exec_policy(stream),
                    thrust::counting_iterator<cudf::size_type>(0),
                    thrust::counting_iterator<cudf::size_type>(results->size()),
                    d_results,
                    jaccard_fn{d_uniques1.data(), d_uniques2.data(), d_intersects.data()});

  if (input1.null_count() || input2.null_count()) {
    auto [null_mask, null_count] =
      cudf::detail::bitmask_and(cudf::table_view({input1.parent(), input2.parent()}), stream, mr);
    results->set_null_mask(null_mask, null_count);
  }

  return results;
}

}  // namespace detail

std::unique_ptr<cudf::column> jaccard_index(cudf::strings_column_view const& input1,
                                            cudf::strings_column_view const& input2,
                                            cudf::size_type width,
                                            rmm::cuda_stream_view stream,
                                            rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  return detail::jaccard_index(input1, input2, width, stream, mr);
}

}  // namespace nvtext
0
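Once the unique and intersect counts are reduced per row, the jaccard_fn arithmetic above is plain scalar math. The following host-only sketch (the function name and values are illustrative, not part of the library) reproduces that final step so the formula can be checked without a GPU:

#include <iostream>

// J = |A ∩ B| / (|A| + |B| - |A ∩ B|), computed from pre-reduced counts
// exactly as jaccard_fn does on the device
float jaccard_from_counts(int uniques1, int uniques2, int intersects)
{
  auto const unions = uniques1 + uniques2 - intersects;
  return unions ? static_cast<float>(intersects) / static_cast<float>(unions) : 0.f;
}

int main()
{
  // e.g. two rows with 4 and 5 unique ngram hashes sharing 3 of them
  std::cout << jaccard_from_counts(4, 5, 3) << "\n";  // prints 0.5
  return 0;
}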
rapidsai_public_repos/cudf/cpp/src
rapidsai_public_repos/cudf/cpp/src/text/edit_distance.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <nvtext/edit_distance.hpp> #include <cudf/column/column.hpp> #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/column/column_view.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/strings/string_view.cuh> #include <cudf/strings/strings_column_view.hpp> #include <cudf/utilities/default_stream.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/execution_policy.h> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/reduce.h> #include <thrust/scan.h> #include <thrust/sequence.h> #include <thrust/transform.h> #include <thrust/transform_scan.h> namespace nvtext { namespace detail { namespace { /** * @brief Compute the Levenshtein distance for each string pair * * Documentation here: https://www.cuelogic.com/blog/the-levenshtein-algorithm * And here: https://en.wikipedia.org/wiki/Levenshtein_distance * * @param d_str First string * @param d_tgt Second string * @param buffer Working buffer for intermediate calculations * @return The edit distance value */ __device__ cudf::size_type compute_distance(cudf::string_view const& d_str, cudf::string_view const& d_tgt, cudf::size_type* buffer) { auto const str_length = d_str.length(); auto const tgt_length = d_tgt.length(); if (str_length == 0) return tgt_length; if (tgt_length == 0) return str_length; auto begin = str_length < tgt_length ? d_str.begin() : d_tgt.begin(); auto itr = str_length < tgt_length ? d_tgt.begin() : d_str.begin(); // .first is min and .second is max auto const [n, m] = std::minmax(str_length, tgt_length); // setup compute buffer pointers auto v0 = buffer; auto v1 = v0 + n + 1; // initialize v0 thrust::sequence(thrust::seq, v0, v1); for (int i = 0; i < m; ++i, ++itr) { auto itr_tgt = begin; v1[0] = i + 1; for (int j = 0; j < n; ++j, ++itr_tgt) { auto sub_cost = v0[j] + (*itr != *itr_tgt); auto del_cost = v0[j + 1] + 1; auto ins_cost = v1[j] + 1; v1[j + 1] = std::min(std::min(sub_cost, del_cost), ins_cost); } thrust::swap(v0, v1); } return v0[n]; } struct edit_distance_levenshtein_algorithm { cudf::column_device_view d_strings; // computing these cudf::column_device_view d_targets; // against these; cudf::size_type* d_buffer; // compute buffer for each string std::ptrdiff_t const* d_offsets; // locate sub-buffer for each string cudf::size_type* d_results; // edit distance values __device__ void operator()(cudf::size_type idx) const { auto d_str = d_strings.is_null(idx) ? cudf::string_view{} : d_strings.element<cudf::string_view>(idx); auto d_tgt = [&] __device__ { // d_targets is also allowed to have only one entry if (d_targets.is_null(idx)) { return cudf::string_view{}; } return d_targets.size() == 1 ? 
d_targets.element<cudf::string_view>(0) : d_targets.element<cudf::string_view>(idx); }(); d_results[idx] = compute_distance(d_str, d_tgt, d_buffer + d_offsets[idx]); } }; struct edit_distance_matrix_levenshtein_algorithm { cudf::column_device_view d_strings; // computing these against itself cudf::size_type* d_buffer; // compute buffer for each string std::ptrdiff_t const* d_offsets; // locate sub-buffer for each string cudf::size_type* d_results; // edit distance values __device__ void operator()(cudf::size_type idx) const { auto const strings_count = d_strings.size(); auto const row = idx / strings_count; auto const col = idx % strings_count; if (row > col) return; // bottom half is computed with the top half of matrix cudf::string_view d_str1 = d_strings.is_null(row) ? cudf::string_view{} : d_strings.element<cudf::string_view>(row); cudf::string_view d_str2 = d_strings.is_null(col) ? cudf::string_view{} : d_strings.element<cudf::string_view>(col); auto work_buffer = d_buffer + d_offsets[idx - ((row + 1) * (row + 2)) / 2]; auto const distance = (row == col) ? 0 : compute_distance(d_str1, d_str2, work_buffer); d_results[idx] = distance; // top half of matrix d_results[col * strings_count + row] = distance; // bottom half of matrix } }; } // namespace /** * @copydoc nvtext::edit_distance */ std::unique_ptr<cudf::column> edit_distance(cudf::strings_column_view const& strings, cudf::strings_column_view const& targets, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto const strings_count = strings.size(); if (strings_count == 0) { return cudf::make_empty_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()}); } if (targets.size() > 1) { CUDF_EXPECTS(strings_count == targets.size(), "targets.size() must equal strings.size()"); } // create device columns from the input columns auto strings_column = cudf::column_device_view::create(strings.parent(), stream); auto d_strings = *strings_column; auto targets_column = cudf::column_device_view::create(targets.parent(), stream); auto d_targets = *targets_column; // calculate the size of the compute-buffer; rmm::device_uvector<std::ptrdiff_t> offsets(strings_count, stream); thrust::transform(rmm::exec_policy(stream), thrust::make_counting_iterator<cudf::size_type>(0), thrust::make_counting_iterator<cudf::size_type>(strings_count), offsets.begin(), [d_strings, d_targets] __device__(auto idx) { if (d_strings.is_null(idx) || d_targets.is_null(idx)) { return cudf::size_type{0}; } auto d_str = d_strings.element<cudf::string_view>(idx); auto d_tgt = d_targets.size() == 1 ? 
d_targets.element<cudf::string_view>(0) : d_targets.element<cudf::string_view>(idx); // just need 2 integers for each character of the shorter string return (std::min(d_str.length(), d_tgt.length()) + 1) * 2; }); // get the total size of the temporary compute buffer int64_t compute_size = thrust::reduce(rmm::exec_policy(stream), offsets.begin(), offsets.end(), int64_t{0}); // convert sizes to offsets in-place thrust::exclusive_scan(rmm::exec_policy(stream), offsets.begin(), offsets.end(), offsets.begin()); // create the temporary compute buffer rmm::device_uvector<cudf::size_type> compute_buffer(compute_size, stream); auto d_buffer = compute_buffer.data(); auto results = cudf::make_fixed_width_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()}, strings_count, rmm::device_buffer{0, stream, mr}, 0, stream, mr); auto d_results = results->mutable_view().data<cudf::size_type>(); // compute the edit distance into the output column thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<cudf::size_type>(0), strings_count, edit_distance_levenshtein_algorithm{d_strings, d_targets, d_buffer, offsets.data(), d_results}); return results; } /** * @copydoc nvtext::edit_distance_matrix */ std::unique_ptr<cudf::column> edit_distance_matrix(cudf::strings_column_view const& strings, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { cudf::size_type strings_count = strings.size(); if (strings_count == 0) { return cudf::make_empty_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()}); } CUDF_EXPECTS(strings_count > 1, "the input strings must include at least 2 strings"); CUDF_EXPECTS(static_cast<size_t>(strings_count) * static_cast<size_t>(strings_count) < static_cast<std::size_t>(std::numeric_limits<cudf::size_type>().max()), "too many strings to create the output column"); // create device column of the input strings column auto strings_column = cudf::column_device_view::create(strings.parent(), stream); auto d_strings = *strings_column; // Calculate the size of the compute-buffer. // We only need memory for half the size of the output matrix since the edit distance calculation // is commutative -- `distance(strings[i],strings[j]) == distance(strings[j],strings[i])` cudf::size_type n_upper = (strings_count * (strings_count - 1)) / 2; rmm::device_uvector<std::ptrdiff_t> offsets(n_upper, stream); auto d_offsets = offsets.data(); CUDF_CUDA_TRY(cudaMemsetAsync(d_offsets, 0, n_upper * sizeof(std::ptrdiff_t), stream.value())); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<cudf::size_type>(0), strings_count * strings_count, [d_strings, d_offsets, strings_count] __device__(cudf::size_type idx) { auto const row = idx / strings_count; auto const col = idx % strings_count; if (row >= col) return; // compute only the top half cudf::string_view const d_str1 = d_strings.is_null(row) ? cudf::string_view{} : d_strings.element<cudf::string_view>(row); cudf::string_view const d_str2 = d_strings.is_null(col) ? 
cudf::string_view{} : d_strings.element<cudf::string_view>(col); if (d_str1.empty() || d_str2.empty()) { return; } // the temp size needed is 2 integers per character of the shorter string d_offsets[idx - ((row + 1) * (row + 2)) / 2] = (std::min(d_str1.length(), d_str2.length()) + 1) * 2; }); // get the total size for the compute buffer int64_t compute_size = thrust::reduce(rmm::exec_policy(stream), offsets.begin(), offsets.end(), int64_t{0}); // convert sizes to offsets in-place thrust::exclusive_scan(rmm::exec_policy(stream), offsets.begin(), offsets.end(), offsets.begin()); // create the compute buffer rmm::device_uvector<cudf::size_type> compute_buffer(compute_size, stream); auto d_buffer = compute_buffer.data(); // compute the edit distance into the output column auto results = cudf::make_fixed_width_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()}, strings_count * strings_count, rmm::device_buffer{0, stream, mr}, 0, stream, mr); auto d_results = results->mutable_view().data<cudf::size_type>(); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<cudf::size_type>(0), strings_count * strings_count, edit_distance_matrix_levenshtein_algorithm{d_strings, d_buffer, d_offsets, d_results}); // build a lists column of the results auto offsets_column = cudf::make_fixed_width_column(cudf::data_type{cudf::type_to_id<cudf::size_type>()}, strings_count + 1, rmm::device_buffer{0, stream, mr}, 0, stream, mr); thrust::transform_exclusive_scan( rmm::exec_policy(stream), thrust::counting_iterator<cudf::size_type>(0), thrust::counting_iterator<cudf::size_type>(strings_count + 1), offsets_column->mutable_view().data<cudf::size_type>(), [strings_count] __device__(auto idx) { return strings_count; }, cudf::size_type{0}, thrust::plus<cudf::size_type>()); return cudf::make_lists_column(strings_count, std::move(offsets_column), std::move(results), 0, // no nulls rmm::device_buffer{0, stream, mr}, stream, mr); } } // namespace detail // external APIs /** * @copydoc nvtext::edit_distance */ std::unique_ptr<cudf::column> edit_distance(cudf::strings_column_view const& input, cudf::strings_column_view const& targets, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::edit_distance(input, targets, stream, mr); } /** * @copydoc nvtext::edit_distance_matrix */ std::unique_ptr<cudf::column> edit_distance_matrix(cudf::strings_column_view const& input, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::edit_distance_matrix(input, stream, mr); } } // namespace nvtext
0
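compute_distance above implements the classic two-row Levenshtein recurrence: v0 holds the previous DP row and v1 the current one, both sized to the shorter string plus one, which is why the compute buffer needs only "2 integers for each character of the shorter string". A host-only sketch of the same scheme (illustrative, not library code) may make the buffer layout easier to follow:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <numeric>
#include <string>
#include <vector>

// Two-row Levenshtein: same v0/v1 swap scheme as compute_distance above
int levenshtein(std::string const& a, std::string const& b)
{
  auto const& s = a.size() < b.size() ? a : b;  // shorter string
  auto const& t = a.size() < b.size() ? b : a;  // longer string
  std::vector<int> v0(s.size() + 1), v1(s.size() + 1);
  std::iota(v0.begin(), v0.end(), 0);  // distance from empty prefix
  for (std::size_t i = 0; i < t.size(); ++i) {
    v1[0] = static_cast<int>(i) + 1;
    for (std::size_t j = 0; j < s.size(); ++j) {
      int const sub = v0[j] + (t[i] != s[j]);  // substitution
      int const del = v0[j + 1] + 1;           // deletion
      int const ins = v1[j] + 1;               // insertion
      v1[j + 1]     = std::min({sub, del, ins});
    }
    std::swap(v0, v1);  // current row becomes previous row
  }
  return v0[s.size()];
}

int main() { std::cout << levenshtein("kitten", "sitting") << "\n"; }  // prints 3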
rapidsai_public_repos/cudf/cpp/src/text
rapidsai_public_repos/cudf/cpp/src/text/bpe/byte_pair_encoding.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <nvtext/byte_pair_encoding.hpp> #include <cudf/column/column.hpp> #include <cudf/column/column_device_view.cuh> #include <cudf/hashing/detail/hash_allocator.cuh> #include <cudf/hashing/detail/hashing.hpp> #include <cudf/hashing/detail/murmurhash3_x86_32.cuh> #include <cudf/strings/string_view.cuh> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/mr/device/polymorphic_allocator.hpp> #include <cuco/static_map.cuh> #include <thrust/distance.h> #include <thrust/execution_policy.h> #include <thrust/find.h> #include <thrust/pair.h> #include <cstdint> #include <type_traits> namespace nvtext { namespace detail { using string_hasher_type = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>; using hash_value_type = string_hasher_type::result_type; using merge_pair_type = thrust::pair<cudf::string_view, cudf::string_view>; using hash_table_allocator_type = rmm::mr::stream_allocator_adaptor<default_allocator<char>>; /** * @brief Hasher function used for building and using the cuco static-map * * This takes advantage of heterogeneous lookup feature in cuco static-map which * allows inserting with one type (index) and looking up with a different type (merge_pair_type). * * The merge-pairs are in adjacent rows so each index will access two rows of string values. * The hash of each string is combined for the returned result. */ struct bpe_hasher { cudf::column_device_view const d_strings; string_hasher_type hasher{}; // used by insert __device__ hash_value_type operator()(cudf::size_type index) const { index *= 2; auto const lhs = d_strings.element<cudf::string_view>(index); auto const rhs = d_strings.element<cudf::string_view>(index + 1); return cudf::hashing::detail::hash_combine(hasher(lhs), hasher(rhs)); } // used by find __device__ hash_value_type operator()(merge_pair_type const& mp) const { return cudf::hashing::detail::hash_combine(hasher(mp.first), hasher(mp.second)); } }; /** * @brief Equal function used for building and using the cuco static-map * * This takes advantage of heterogeneous lookup feature in cuco static-map which * allows inserting with one type (index) and looking up with a different type (merge_pair_type). * * The merge-pairs are in adjacent rows so each index will access two rows of string values. * All rows from the input merge-pairs are unique. 
*/ struct bpe_equal { cudf::column_device_view const d_strings; // used by insert __device__ bool operator()(cudf::size_type lhs, cudf::size_type rhs) const noexcept { return lhs == rhs; // all rows are unique } // used by find __device__ bool operator()(cudf::size_type lhs, merge_pair_type const& rhs) const noexcept { lhs *= 2; auto const left = d_strings.element<cudf::string_view>(lhs); auto const right = d_strings.element<cudf::string_view>(lhs + 1); return (left == rhs.first) && (right == rhs.second); } }; using bpe_probe_scheme = cuco::experimental::linear_probing<1, bpe_hasher>; using merge_pairs_map_type = cuco::experimental::static_map<cudf::size_type, cudf::size_type, cuco::experimental::extent<std::size_t>, cuda::thread_scope_device, bpe_equal, bpe_probe_scheme, hash_table_allocator_type>; /** * @brief Hasher function used for building and using the cuco static-map * * This takes advantage of heterogeneous lookup feature in cuco static-map which * allows inserting with one type (index) and looking up with a different type (merge_pair_type). * * Each component of the merge-pairs (left and right) are stored individually in the map. */ struct mp_hasher { cudf::column_device_view const d_strings; string_hasher_type hasher{}; // used by insert __device__ hash_value_type operator()(cudf::size_type index) const { auto const d_str = d_strings.element<cudf::string_view>(index); return hasher(d_str); } // used by find __device__ hash_value_type operator()(cudf::string_view const& d_str) const { return hasher(d_str); } }; /** * @brief Equal function used for building and using the cuco static-map * * This takes advantage of heterogeneous lookup feature in cuco static-map which * allows inserting with one type (index) and looking up with a different type (string). 
*/ struct mp_equal { cudf::column_device_view const d_strings; // used by insert __device__ bool operator()(cudf::size_type lhs, cudf::size_type rhs) const noexcept { auto const left = d_strings.element<cudf::string_view>(lhs); auto const right = d_strings.element<cudf::string_view>(rhs); return left == right; } // used by find __device__ bool operator()(cudf::size_type lhs, cudf::string_view const& rhs) const noexcept { auto const left = d_strings.element<cudf::string_view>(lhs); return left == rhs; } }; using mp_probe_scheme = cuco::experimental::linear_probing<1, mp_hasher>; using mp_table_map_type = cuco::experimental::static_map<cudf::size_type, cudf::size_type, cuco::experimental::extent<std::size_t>, cuda::thread_scope_device, mp_equal, mp_probe_scheme, hash_table_allocator_type>; } // namespace detail // since column_device_view::create() returns a little more than // std::unique_ptr<column_device_view>, this helper simplifies the return type for us using col_device_view = std::invoke_result_t<decltype(&cudf::column_device_view::create), cudf::column_view, rmm::cuda_stream_view>; struct bpe_merge_pairs::bpe_merge_pairs_impl { std::unique_ptr<cudf::column> const merge_pairs; col_device_view const d_merge_pairs; std::unique_ptr<detail::merge_pairs_map_type> merge_pairs_map; // for BPE std::unique_ptr<detail::mp_table_map_type> mp_table_map; // for locating unpairables bpe_merge_pairs_impl(std::unique_ptr<cudf::column>&& merge_pairs, col_device_view&& d_merge_pairs, std::unique_ptr<detail::merge_pairs_map_type>&& merge_pairs_map, std::unique_ptr<detail::mp_table_map_type>&& mp_table_map); auto const get_merge_pairs() const { return *d_merge_pairs; } auto get_merge_pairs_ref() const { return merge_pairs_map->ref(cuco::experimental::op::find); } auto get_mp_table_ref() const { return mp_table_map->ref(cuco::experimental::op::find); } }; } // namespace nvtext
0
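The bpe_hasher/bpe_equal pair above depends on cuco's heterogeneous lookup: the map is built with row indices as keys (each index hashing rows 2i and 2i+1 together) but probed with merge_pair_type values, which only works because both key forms produce the same hash. A host-only sketch of that invariant, with std::hash standing in for MurmurHash3 and the hash_combine constants being the usual boost-style values rather than cudf's exact implementation:

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// boost-style combine; constants illustrative, not cudf's exact formula
std::size_t hash_combine(std::size_t lhs, std::size_t rhs)
{
  return lhs ^ (rhs + 0x9e3779b9 + (lhs << 6) + (lhs >> 2));
}

int main()
{
  std::vector<std::string> rows{"t", "h", "th", "e"};  // merge pairs: (t,h), (th,e)
  std::hash<std::string> h;
  // "insert" side: key is a row index; hash adjacent rows 2i and 2i+1
  std::size_t const by_index = hash_combine(h(rows[0]), h(rows[1]));
  // "find" side: key is the pair of strings itself
  std::size_t const by_pair = hash_combine(h(std::string{"t"}), h(std::string{"h"}));
  std::cout << (by_index == by_pair) << "\n";  // prints 1: both key forms agree
  return 0;
}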
rapidsai_public_repos/cudf/cpp/src/text
rapidsai_public_repos/cudf/cpp/src/text/bpe/load_merge_pairs.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <text/bpe/byte_pair_encoding.cuh> #include <nvtext/byte_pair_encoding.hpp> #include <cudf/column/column_factories.hpp> #include <cudf/detail/iterator.cuh> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/strings/split/split.hpp> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <fstream> #include <iostream> #include <vector> namespace nvtext { namespace detail { namespace { /** * @brief Loads a text file of merge-pairs into a strings column. * * The line position in the file indicates the pair's rank. * * @code{.pseudo} * Format of the file: * #version .. * a1 a2 * b1 b2 * c1 c2 * ... * @endcode * * @param filename_merges Path to text file containing merge-pairs * @return object containing table elements for the BPE function */ std::unique_ptr<cudf::column> load_file_to_column(std::string const& filename_merges, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { std::ifstream merges_file(filename_merges); CUDF_EXPECTS(merges_file.good(), "Could not open " + filename_merges); std::vector<char> chars{}; std::vector<cudf::size_type> offsets(1, 0); std::string line; std::getline(merges_file, line); std::string version = "#version"; if (line.substr(0, version.size()).compare(version) == 0) { std::getline(merges_file, line); } // This is a text file delimited only by CR/LF. // TODO: Look into using the CSV reader to load the strings column instead. 
while (!line.empty()) { chars.insert(chars.end(), std::cbegin(line), std::cend(line)); offsets.push_back(offsets.back() + line.length()); std::getline(merges_file, line); } CUDF_EXPECTS(!chars.empty(), "No data found in " + filename_merges); auto d_chars = cudf::detail::make_device_uvector_async(chars, stream, mr); auto d_offsets = cudf::detail::make_device_uvector_async(offsets, stream, mr); return cudf::make_strings_column(d_chars, d_offsets, {}, 0); } std::unique_ptr<detail::merge_pairs_map_type> initialize_merge_pairs_map( cudf::column_device_view const& input, rmm::cuda_stream_view stream) { auto merge_pairs_map = std::make_unique<merge_pairs_map_type>( static_cast<size_t>(input.size()), cuco::empty_key{-1}, cuco::empty_value{-1}, bpe_equal{input}, bpe_probe_scheme{bpe_hasher{input}}, hash_table_allocator_type{default_allocator<char>{}, stream}, stream.value()); auto iter = cudf::detail::make_counting_transform_iterator( 0, [] __device__(cudf::size_type idx) { return cuco::make_pair(idx, idx); }); merge_pairs_map->insert_async(iter, iter + (input.size() / 2), stream.value()); return merge_pairs_map; } std::unique_ptr<detail::mp_table_map_type> initialize_mp_table_map( cudf::column_device_view const& input, rmm::cuda_stream_view stream) { auto mp_table_map = std::make_unique<mp_table_map_type>( static_cast<size_t>(input.size()), cuco::empty_key{-1}, cuco::empty_value{-1}, mp_equal{input}, mp_probe_scheme{mp_hasher{input}}, hash_table_allocator_type{default_allocator<char>{}, stream}, stream.value()); auto iter = cudf::detail::make_counting_transform_iterator( 0, [] __device__(cudf::size_type idx) { return cuco::make_pair(idx, idx); }); mp_table_map->insert_async(iter, iter + input.size(), stream.value()); return mp_table_map; } std::unique_ptr<bpe_merge_pairs::bpe_merge_pairs_impl> create_bpe_merge_pairs_impl( std::unique_ptr<cudf::column>&& input, rmm::cuda_stream_view stream) { auto d_input = cudf::column_device_view::create(input->view(), stream); auto merge_pairs = initialize_merge_pairs_map(*d_input, stream); auto mp_table_map = initialize_mp_table_map(*d_input, stream); return std::make_unique<nvtext::bpe_merge_pairs::bpe_merge_pairs_impl>( std::move(input), std::move(d_input), std::move(merge_pairs), std::move(mp_table_map)); } std::unique_ptr<bpe_merge_pairs::bpe_merge_pairs_impl> create_bpe_merge_pairs_impl( cudf::strings_column_view const& input, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto pairs = cudf::strings::split_record(input, cudf::string_scalar(" "), 1, stream, mr); auto content = pairs->release(); return create_bpe_merge_pairs_impl(std::move(content.children.back()), stream); } } // namespace std::unique_ptr<bpe_merge_pairs> load_merge_pairs_file(std::string const& filename_merges, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto input_column = load_file_to_column(filename_merges, stream, mr); return std::make_unique<bpe_merge_pairs>(std::move(input_column), stream, mr); } std::unique_ptr<bpe_merge_pairs> load_merge_pairs(cudf::strings_column_view const& merge_pairs, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_EXPECTS(!merge_pairs.is_empty(), "Merge pairs must not be empty"); CUDF_EXPECTS(!merge_pairs.has_nulls(), "Merge pairs may not contain nulls"); return std::make_unique<bpe_merge_pairs>(merge_pairs, stream, mr); } } // namespace detail std::unique_ptr<bpe_merge_pairs> load_merge_pairs_file(std::string const& filename_merges, rmm::mr::device_memory_resource* mr) { 
CUDF_FUNC_RANGE(); return detail::load_merge_pairs_file(filename_merges, cudf::get_default_stream(), mr); } std::unique_ptr<bpe_merge_pairs> load_merge_pairs(cudf::strings_column_view const& merge_pairs, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::load_merge_pairs(merge_pairs, stream, mr); } bpe_merge_pairs::bpe_merge_pairs_impl::bpe_merge_pairs_impl( std::unique_ptr<cudf::column>&& merge_pairs, std::unique_ptr<cudf::column_device_view, std::function<void(cudf::column_device_view*)>>&& d_merge_pairs, std::unique_ptr<detail::merge_pairs_map_type>&& merge_pairs_map, std::unique_ptr<detail::mp_table_map_type>&& mp_table_map) : merge_pairs(std::move(merge_pairs)), d_merge_pairs(std::move(d_merge_pairs)), merge_pairs_map(std::move(merge_pairs_map)), mp_table_map(std::move(mp_table_map)) { } bpe_merge_pairs::bpe_merge_pairs(std::unique_ptr<cudf::column>&& input, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource*) : impl(detail::create_bpe_merge_pairs_impl(std::move(input), stream).release()) { } bpe_merge_pairs::bpe_merge_pairs(cudf::strings_column_view const& input, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) : impl(detail::create_bpe_merge_pairs_impl(input, stream, mr).release()) { } bpe_merge_pairs::bpe_merge_pairs() = default; bpe_merge_pairs::~bpe_merge_pairs() { delete impl; } } // namespace nvtext
0
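load_file_to_column above consumes a merges file whose line position encodes each pair's rank. A minimal host-only parsing sketch under the same format assumptions (optional "#version" header line, one space-separated pair per line, no cudf types involved):

#include <fstream>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// pairs[rank] == {left, right}; rank is the line position, as in the format above
std::vector<std::pair<std::string, std::string>> load_merges(std::string const& path)
{
  std::ifstream in(path);
  std::vector<std::pair<std::string, std::string>> pairs;
  std::string line;
  std::getline(in, line);
  if (line.rfind("#version", 0) == 0) { std::getline(in, line); }  // skip optional header
  while (!line.empty()) {
    auto const sp = line.find(' ');
    pairs.emplace_back(line.substr(0, sp), line.substr(sp + 1));
    if (!std::getline(in, line)) { break; }
  }
  return pairs;
}

int main()
{
  // "merges.txt" is a hypothetical path for illustration
  for (auto const& [l, r] : load_merges("merges.txt")) { std::cout << l << " + " << r << "\n"; }
  return 0;
}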
rapidsai_public_repos/cudf/cpp/src/text
rapidsai_public_repos/cudf/cpp/src/text/bpe/byte_pair_encoding.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <text/bpe/byte_pair_encoding.cuh> #include <nvtext/byte_pair_encoding.hpp> #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/get_value.cuh> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/sizes_to_offsets_iterator.cuh> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/strings/detail/strings_children.cuh> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/copy.h> #include <thrust/distance.h> #include <thrust/execution_policy.h> #include <thrust/functional.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/discard_iterator.h> #include <thrust/merge.h> #include <thrust/remove.h> #include <thrust/unique.h> namespace nvtext { namespace detail { namespace { constexpr int block_size = 512; /** * @brief Produces offsets to unpairable locations in the given chars array * * Launched as a thread per byte of the chars array. * The output is non-zero offsets to locations of unpairable substrings. * An unpairable substring does not exist in the given map and so will * never be paired. Fortunately, this can be used as an artificial * boundary providing increased parallelism in the BPE kernel. * * @tparam MapRefType The type of the map finder object */ template <typename MapRefType> struct bpe_unpairable_offsets_fn { cudf::device_span<char const> d_chars; cudf::size_type offset; MapRefType const d_map; __device__ cudf::size_type operator()(cudf::size_type idx) { if (!cudf::strings::detail::is_begin_utf8_char(d_chars[idx])) { return 0; } auto const itr = d_chars.data() + idx; auto const end = d_chars.end(); auto const lhs = cudf::string_view(itr, cudf::strings::detail::bytes_in_utf8_byte(*itr)); auto const next = itr + lhs.size_bytes(); auto output = 0; if (next < end) { auto const rhs = cudf::string_view(next, cudf::strings::detail::bytes_in_utf8_byte(*next)); // see if both halves exist anywhere in the table, if not these are unpairable if (d_map.find(lhs) == d_map.end() && d_map.find(rhs) == d_map.end()) { output = idx + lhs.size_bytes() + offset; // offset for artificial boundary } } return output; } }; /** * @brief Performs byte-pair-encoding * * Computes the locations where the separator will be inserted in `d_spaces_data`. * This is launched as a string per block. * * The process first initializes all characters to 1 per position in `d_spaces_data`. * All pairs are realized and their ranks stored in `d_ranks_data`. * * Iteratively, the minimum rank is located, the corresponding `d_spaces_data` location * is set to 0 resulting in new potential pairs. The process repeats accounting for * the rank of the newly formed pairs. 
* * Once there are no more rankable pairs, the process finishes and the `d_spaces_data` * values identify the location to insert the separator. * * @tparam MapRefType The type of the map finder object * @param d_strings Input data * @param d_map For looking up individual string candidates * @param d_spaces_data Output the location where separator will be inserted * @param d_ranks_data Working memory to hold pair ranks * @param d_rerank_data Working memory to hold locations where reranking is required */ template <typename MapRefType> __global__ void bpe_parallel_fn(cudf::column_device_view const d_strings, MapRefType const d_map, int8_t* d_spaces_data, // working memory cudf::size_type* d_ranks_data, // more working memory int8_t* d_rerank_data // and one more working memory ) { // string per block auto const str_idx = static_cast<cudf::size_type>(cudf::detail::grid_1d::global_thread_id() / block_size); auto const lane_idx = static_cast<cudf::size_type>(threadIdx.x); auto const d_str = d_strings.element<cudf::string_view>(str_idx); auto const offsets = d_strings.child(cudf::strings_column_view::offsets_column_index).data<cudf::size_type>(); auto const offset = offsets[str_idx + d_strings.offset()] - offsets[d_strings.offset()]; auto const d_spaces = d_spaces_data + offset; auto const end_spaces = d_spaces + d_str.size_bytes(); auto const d_ranks = d_ranks_data + offset; auto const end_ranks = d_ranks + d_str.size_bytes(); auto const d_rerank = d_rerank_data + offset; auto const end_rerank = d_rerank + d_str.size_bytes(); auto constexpr max_rank = cuda::std::numeric_limits<cudf::size_type>::max(); __shared__ cudf::size_type block_min_rank; using block_reduce = cub::BlockReduce<cudf::size_type, block_size>; __shared__ typename block_reduce::TempStorage temp_storage; auto const num_valid = block_size < d_str.size_bytes() ? 
block_size : d_str.size_bytes(); // init all the re-rank identifiers to zero for (auto itr = d_rerank + lane_idx; itr < end_rerank; itr += block_size) { *itr = 0; } // init all ranks to max for (auto itr = d_ranks + lane_idx; itr < end_ranks; itr += block_size) { *itr = max_rank; } // init all spaces to 1 as appropriate for (auto itr = d_spaces + lane_idx; itr < end_spaces; itr += block_size) { auto const index = thrust::distance(d_spaces, itr); *itr = static_cast<int8_t>(cudf::strings::detail::is_begin_utf8_char(d_str.data()[index])); } __syncthreads(); // for finding the next half of a pair auto next_substr = [d_str, d_spaces, end = end_spaces](int8_t* begin) { auto const next = thrust::find(thrust::seq, begin + 1, end, 1); auto const size = static_cast<cudf::size_type>(thrust::distance(begin, next)); return cudf::string_view(d_str.data() + thrust::distance(d_spaces, begin), size); }; // for locating adjacent pairs after merging a pair auto find_prev = [begin = d_spaces](int8_t* ptr) { while (ptr > begin && *ptr == 0) { --ptr; } return ptr; }; auto min_rank = max_rank; // store all the initial ranks for each pair // every character but the first one will have an initial rank // // Example: // string: abcdefghij // spaces: 1111111111 // ranks: *948516327 for (auto itr = d_spaces + lane_idx; itr < end_spaces; itr += block_size) { if (*itr == 0) { continue; } // skips any UTF-8 continuation bytes // resolve pair and lookup its rank auto const lhs = next_substr(itr); // retrieve lhs of the pair auto const next_itr = itr + lhs.size_bytes(); if (next_itr < end_spaces) { auto const rhs = next_substr(next_itr); // retrieve rhs of the pair if (!rhs.empty()) { auto rank = max_rank; auto const mp = merge_pair_type{lhs, rhs}; auto const map_itr = d_map.find(mp); // lookup pair in merges table; if (map_itr != d_map.end()) { rank = map_itr->second; } // found a match; d_ranks[thrust::distance(d_spaces, next_itr)] = rank; // store the rank if (rank < min_rank) { min_rank = rank; } } } } // compute the min rank across the block auto const reduce_rank = block_reduce(temp_storage).Reduce(min_rank, cub::Min(), num_valid); if (lane_idx == 0) { block_min_rank = reduce_rank; } __syncthreads(); // loop through the ranks processing the current minimum until there are no more while (block_min_rank < max_rank) { // search the d_ranks for matches to block_min_rank for (auto itr = d_ranks + lane_idx; itr < end_ranks; itr += block_size) { if (*itr == block_min_rank) { auto ptr = itr - 1; // check for adjacent min-rank (edge-case) while (ptr > d_ranks && *ptr == max_rank) { --ptr; } // set the output value to 0 at this position (erases separator, merges pair) // using example string above, the min-rank is 1 at position 5 // string: abcdefghij // spaces: 1111101111 (set position 5 to 0) if (*ptr != block_min_rank) { d_spaces[thrust::distance(d_ranks, itr)] = 0; } } } __syncthreads(); // identify all the re-rank locations (logic above invalidated adjacent pairs) // using example string above, the adjacent pairs have to be re-ranked // string: abcdefghij // spaces: 1111101111 (pair 'e,f' is now merged) // rerank: 0000101000 ('ef' and 'fg' need re-ranking as 'd,ef' and 'ef,g') for (auto itr = d_ranks + lane_idx; itr < end_ranks; itr += block_size) { auto const index = thrust::distance(d_ranks, itr); if (*itr == block_min_rank && d_spaces[index] == 0) { // find previous pair mid-point auto ptr = find_prev(d_spaces + index - 1); if (ptr > d_spaces) { d_rerank[thrust::distance(d_spaces, ptr)] = 1; } // find next pair 
mid-point ptr = thrust::find(thrust::seq, d_spaces + index + 1, end_spaces, 1); if (ptr < end_spaces) { d_rerank[thrust::distance(d_spaces, ptr)] = 1; } *itr = max_rank; // reset this rank } } __syncthreads(); // compute the ranks for the newly created pairs min_rank = max_rank; // and record the new minimum along the way for (auto itr = d_rerank + lane_idx; itr < end_rerank; itr += block_size) { auto const index = thrust::distance(d_rerank, itr); auto rank = d_ranks[index]; if (*itr) { *itr = 0; // reset re-rank // build lhs of pair auto const ptr = find_prev(d_spaces + index - 1); auto const size = static_cast<cudf::size_type>(thrust::distance(ptr, d_spaces + index)); auto const lhs = cudf::string_view(d_str.data() + thrust::distance(d_spaces, ptr), size); auto const rhs = next_substr(d_spaces + index); // retrieve rhs of pair rank = max_rank; if (!rhs.empty()) { auto const mp = merge_pair_type{lhs, rhs}; auto const map_itr = d_map.find(mp); // lookup rank for this pair; if (map_itr != d_map.end()) { rank = map_itr->second; } // found a match } d_ranks[index] = rank; // store new rank } if (rank < min_rank) { min_rank = rank; } } // re-compute the minimum rank across the block (since new pairs are created above) auto const reduce_rank = block_reduce(temp_storage).Reduce(min_rank, cub::Min(), num_valid); if (lane_idx == 0) { block_min_rank = reduce_rank; } __syncthreads(); } // if no min ranks are found we are done, otherwise start again } /** * @brief Computes the output size of each strings row * * This launches as a string per block. * The non-zero values in `d_spaces_data` for each string is added to * the current string size to produce the total output bytes. * * @param d_strings Input data * @param d_spaces_data Output the location where separator will be inserted * @param d_sizes Output sizes of each row */ __global__ void bpe_finalize(cudf::column_device_view const d_strings, int8_t* d_spaces_data, // where separators are inserted cudf::size_type* d_sizes // output sizes of encoded strings ) { // string per block auto const str_idx = static_cast<cudf::size_type>(cudf::detail::grid_1d::global_thread_id() / block_size); auto const lane_idx = static_cast<cudf::size_type>(threadIdx.x); if (d_strings.is_null(str_idx)) { d_sizes[str_idx] = 0; return; } auto const d_str = d_strings.element<cudf::string_view>(str_idx); if (d_str.empty()) { d_sizes[str_idx] = 0; return; } auto const offsets = d_strings.child(cudf::strings_column_view::offsets_column_index).data<cudf::size_type>(); auto const offset = offsets[str_idx + d_strings.offset()] - offsets[d_strings.offset()]; auto const d_spaces = d_spaces_data + offset; auto const end_spaces = d_spaces + d_str.size_bytes(); auto const num_valid = block_size < d_str.size_bytes() ? 
block_size : d_str.size_bytes(); using block_reduce = cub::BlockReduce<cudf::size_type, block_size>; __shared__ typename block_reduce::TempStorage temp_storage; // reset the first position -- no separator to be added here if (lane_idx == 0) { *d_spaces = 0; } // compute the output size for this string by counting the resulting separator positions auto bytes = 0; for (auto itr = d_spaces + lane_idx; itr < end_spaces; itr += block_size) { bytes += (*itr > 0); } auto const total_bytes = block_reduce(temp_storage).Sum(bytes, num_valid); if (lane_idx == 0) { d_sizes[str_idx] = total_bytes + d_str.size_bytes(); } } } // namespace std::unique_ptr<cudf::column> byte_pair_encoding(cudf::strings_column_view const& input, bpe_merge_pairs const& merge_pairs, cudf::string_scalar const& separator, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { if (input.is_empty() || input.chars_size() == 0) { return cudf::make_empty_column(cudf::type_id::STRING); } CUDF_EXPECTS(separator.is_valid(stream), "separator parameter must be valid"); auto const d_separator = separator.value(stream); CUDF_EXPECTS(d_separator.size_bytes() == 1, "for now, separator must be a single-byte character"); auto const d_strings = cudf::column_device_view::create(input.parent(), stream); auto const first_offset = (input.offset() == 0) ? 0 : cudf::detail::get_value<cudf::size_type>( input.offsets(), input.offset(), stream); auto const last_offset = (input.offset() == 0 && input.size() == input.offsets().size() - 1) ? input.chars().size() : cudf::detail::get_value<cudf::size_type>( input.offsets(), input.size() + input.offset(), stream); auto const chars_size = last_offset - first_offset; auto const d_input_chars = input.chars().data<char>() + first_offset; auto const offset_data_type = cudf::data_type{cudf::type_to_id<cudf::size_type>()}; auto offsets = cudf::make_numeric_column( offset_data_type, input.size() + 1, cudf::mask_state::UNALLOCATED, stream, mr); auto d_offsets = offsets->mutable_view().data<cudf::size_type>(); rmm::device_uvector<int8_t> d_spaces(chars_size, stream); // identifies non-merged pairs // used for various purposes below: unpairable-offsets, pair ranks, separator insert positions rmm::device_uvector<cudf::size_type> d_working(chars_size, stream); auto const chars_begin = thrust::counting_iterator<cudf::size_type>(0); auto const chars_end = thrust::counting_iterator<cudf::size_type>(chars_size); { // this kernel locates unpairable sections of strings to create artificial string row // boundaries; the boundary values are recorded as offsets in d_up_offsets auto const d_up_offsets = d_working.data(); // store unpairable offsets here auto const mp_map = merge_pairs.impl->get_mp_table_ref(); // lookup table auto const d_chars_span = cudf::device_span<char const>(d_input_chars, chars_size); auto up_fn = bpe_unpairable_offsets_fn<decltype(mp_map)>{d_chars_span, first_offset, mp_map}; thrust::transform(rmm::exec_policy_nosync(stream), chars_begin, chars_end, d_up_offsets, up_fn); auto const up_end = // remove all but the unpairable offsets thrust::remove(rmm::exec_policy_nosync(stream), d_up_offsets, d_up_offsets + chars_size, 0); auto const unpairables = thrust::distance(d_up_offsets, up_end); // number of unpairables // new string boundaries created by combining unpairable offsets with the existing offsets auto tmp_offsets = rmm::device_uvector<cudf::size_type>(unpairables + input.size() + 1, stream); thrust::merge(rmm::exec_policy_nosync(stream), input.offsets_begin(), input.offsets_end(), 
d_up_offsets, up_end, tmp_offsets.begin()); // remove any adjacent duplicate offsets (i.e. empty or null rows) auto const offsets_end = thrust::unique(rmm::exec_policy_nosync(stream), tmp_offsets.begin(), tmp_offsets.end()); auto const offsets_total = static_cast<cudf::size_type>(thrust::distance(tmp_offsets.begin(), offsets_end)); tmp_offsets.resize(offsets_total, stream); // temp column created with the merged offsets and the original chars data auto const col_offsets = cudf::column_view(cudf::device_span<cudf::size_type const>(tmp_offsets)); auto const tmp_size = offsets_total - 1; auto const tmp_input = cudf::column_view( input.parent().type(), tmp_size, nullptr, nullptr, 0, 0, {col_offsets, input.chars()}); auto const d_tmp_strings = cudf::column_device_view::create(tmp_input, stream); // launch the byte-pair-encoding kernel on the temp column rmm::device_uvector<int8_t> d_rerank(chars_size, stream); // more working memory; auto const d_ranks = d_working.data(); // store pair ranks here auto const pair_map = merge_pairs.impl->get_merge_pairs_ref(); bpe_parallel_fn<decltype(pair_map)><<<tmp_size, block_size, 0, stream.value()>>>( *d_tmp_strings, pair_map, d_spaces.data(), d_ranks, d_rerank.data()); } // compute the output sizes and store them in the d_offsets vector bpe_finalize<<<input.size(), block_size, 0, stream.value()>>>( *d_strings, d_spaces.data(), d_offsets); // convert sizes to offsets in-place auto const bytes = cudf::detail::sizes_to_offsets(d_offsets, d_offsets + input.size() + 1, d_offsets, stream); CUDF_EXPECTS(bytes <= static_cast<int64_t>(std::numeric_limits<cudf::size_type>::max()), "Size of output exceeds the column size limit", std::overflow_error); // build the output: inserting separators to the input character data auto chars = cudf::strings::detail::create_chars_child_column(bytes, stream, mr); auto d_chars = chars->mutable_view().data<char>(); auto const d_inserts = d_working.data(); // stores the insert positions auto offsets_at_non_zero = [d_spaces = d_spaces.data()] __device__(auto idx) { return d_spaces[idx] > 0; // separator to be inserted here }; auto const copy_end = thrust::copy_if( rmm::exec_policy_nosync(stream), chars_begin + 1, chars_end, d_inserts, offsets_at_non_zero); // this will insert the single-byte separator into positions specified in d_inserts auto const sep_char = thrust::constant_iterator<char>(separator.to_string(stream)[0]); thrust::merge_by_key(rmm::exec_policy_nosync(stream), d_inserts, // where to insert separator byte copy_end, // chars_begin, // all indices chars_end, // sep_char, // byte to insert d_input_chars, // original data thrust::make_discard_iterator(), d_chars); // result return cudf::make_strings_column(input.size(), std::move(offsets), std::move(chars), input.null_count(), cudf::detail::copy_bitmask(input.parent(), stream, mr)); } } // namespace detail std::unique_ptr<cudf::column> byte_pair_encoding(cudf::strings_column_view const& input, bpe_merge_pairs const& merges_table, cudf::string_scalar const& separator, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::byte_pair_encoding(input, merges_table, separator, cudf::get_default_stream(), mr); } } // namespace nvtext
0
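bpe_parallel_fn above realizes byte pair encoding by repeatedly erasing the separator at the current minimum-rank pair and then re-ranking only the neighbors that merge disturbed. The same loop written sequentially on the host, with a hypothetical three-entry merges table, shows the order of operations the kernel parallelizes:

#include <cstddef>
#include <iostream>
#include <limits>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main()
{
  // hypothetical merges table: pair -> rank (lower rank merges first)
  std::map<std::pair<std::string, std::string>, int> ranks{
    {{"h", "e"}, 0}, {{"l", "l"}, 1}, {{"he", "ll"}, 2}};
  std::vector<std::string> toks{"h", "e", "l", "l", "o"};
  while (true) {
    int best        = std::numeric_limits<int>::max();
    std::size_t pos = 0;
    // find the minimum-rank adjacent pair (the kernel does this block-wide)
    for (std::size_t i = 0; i + 1 < toks.size(); ++i) {
      auto const it = ranks.find({toks[i], toks[i + 1]});
      if (it != ranks.end() && it->second < best) {
        best = it->second;
        pos  = i;
      }
    }
    if (best == std::numeric_limits<int>::max()) { break; }  // no rankable pairs left
    toks[pos] += toks[pos + 1];          // merge the minimum-rank pair
    toks.erase(toks.begin() + pos + 1);  // neighbors re-pair on the next pass
  }
  for (auto const& t : toks) { std::cout << t << ' '; }  // prints: hell o
  std::cout << "\n";
  return 0;
}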
rapidsai_public_repos/cudf/cpp/src/text
rapidsai_public_repos/cudf/cpp/src/text/subword/load_hash_file.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <text/subword/detail/codepoint_metadata.ah> #include <text/subword/detail/tokenizer_utils.cuh> #include <nvtext/detail/load_hash_file.hpp> #include <cudf/column/column_factories.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/strings/detail/utilities.cuh> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/fill.h> #include <algorithm> #include <cstdint> #include <fstream> #include <iostream> #include <vector> namespace nvtext { namespace detail { /** * @brief Retrieve the code point metadata table. * * Build the code point metadata table in device memory * using the vector pieces from codepoint_metadata.ah */ rmm::device_uvector<codepoint_metadata_type> get_codepoint_metadata(rmm::cuda_stream_view stream) { auto table_vector = rmm::device_uvector<codepoint_metadata_type>(codepoint_metadata_size, stream); auto table = table_vector.data(); thrust::fill(rmm::exec_policy(stream), table + cp_section1_end, table + codepoint_metadata_size, codepoint_metadata_default_value); CUDF_CUDA_TRY(cudaMemcpyAsync(table, codepoint_metadata, cp_section1_end * sizeof(codepoint_metadata[0]), // 1st section cudaMemcpyDefault, stream.value())); CUDF_CUDA_TRY(cudaMemcpyAsync( table + cp_section2_begin, cp_metadata_917505_917999, (cp_section2_end - cp_section2_begin + 1) * sizeof(codepoint_metadata[0]), // 2nd section cudaMemcpyDefault, stream.value())); return table_vector; } /** * @brief Retrieve the aux code point data table. * * Build the aux code point data table in device memory * using the vector pieces from codepoint_metadata.ah */ rmm::device_uvector<aux_codepoint_data_type> get_aux_codepoint_data(rmm::cuda_stream_view stream) { auto table_vector = rmm::device_uvector<aux_codepoint_data_type>(aux_codepoint_data_size, stream); auto table = table_vector.data(); thrust::fill(rmm::exec_policy(stream), table + aux_section1_end, table + aux_codepoint_data_size, aux_codepoint_default_value); CUDF_CUDA_TRY(cudaMemcpyAsync(table, aux_codepoint_data, aux_section1_end * sizeof(aux_codepoint_data[0]), // 1st section cudaMemcpyDefault, stream.value())); CUDF_CUDA_TRY(cudaMemcpyAsync( table + aux_section2_begin, aux_cp_data_44032_55203, (aux_section2_end - aux_section2_begin + 1) * sizeof(aux_codepoint_data[0]), // 2nd section cudaMemcpyDefault, stream.value())); CUDF_CUDA_TRY(cudaMemcpyAsync( table + aux_section3_begin, aux_cp_data_70475_71099, (aux_section3_end - aux_section3_begin + 1) * sizeof(aux_codepoint_data[0]), // 3rd section cudaMemcpyDefault, stream.value())); CUDF_CUDA_TRY(cudaMemcpyAsync( table + aux_section4_begin, aux_cp_data_119134_119232, (aux_section4_end - aux_section4_begin + 1) * sizeof(aux_codepoint_data[0]), // 4th section cudaMemcpyDefault, stream.value())); return table_vector; } namespace { /** * @brief Convert string to uint32. 
* * This just wraps the std::stoi but provides a nice error message * in case the hash file format is incorrect. */ uint32_t str_to_uint32(std::string const& str, uint64_t line_no) { try { return std::stoi(str); // there is no std::stoui } catch (std::exception const& exc) { std::string message("Line "); message += std::to_string(line_no) + ": "; message += "cannot convert integer from '"; message += str; message += "': "; message += exc.what(); std::cerr << message << std::endl; throw; } } /** * @brief Convert string to uint64. * * This just wraps the std::stoul but provides a nice error message * in case the hash file format is incorrect. */ uint64_t str_to_uint64(std::string const& str, uint64_t line_no) { try { return std::stoul(str); } catch (std::exception const& exc) { std::string message("Line "); message += std::to_string(line_no) + ": "; message += "cannot convert integer from '"; message += str; message += "': "; message += exc.what(); std::cerr << message << std::endl; throw; } } } // namespace /** * @brief Loads a text file representing the hashed vocabulary into hashed_vocabulary struct. * * @code{.pseudo} * Format of the file (ASCII text file with numbers): * First 3 lines have the following values: * outer_hash_a * outer_hash_b * number-of-bins * The next number-of-bins lines has two values in each line separated by a space * coefficient offset * ... * Next line has the size (number of lines) of the table followed * by the table values -- one value per line. * The last three lines: * unknown_token_id * first_token_id * separator_token_id * @endcode * * @param filename_hashed_vocabulary Path to text file containing hashed vocabulary * @return object containing hash table elements for the wordpiece tokenizer */ std::unique_ptr<hashed_vocabulary> load_vocabulary_file( std::string const& filename_hashed_vocabulary, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { hashed_vocabulary result; std::ifstream hash_file(filename_hashed_vocabulary); CUDF_EXPECTS(hash_file.good(), "Could not open " + filename_hashed_vocabulary); uint64_t line_no = 1; std::string line; std::getline(hash_file, line); result.outer_hash_a = str_to_uint32(line, line_no++); std::getline(hash_file, line); result.outer_hash_b = str_to_uint32(line, line_no++); std::getline(hash_file, line); result.num_bins = str_to_uint32(line, line_no++); std::vector<uint64_t> bin_coefficients(result.num_bins); std::vector<uint16_t> bin_offsets(result.num_bins); for (int i = 0; i < result.num_bins; ++i) { std::getline(hash_file, line); size_t loc_of_space = line.find(" "); CUDF_EXPECTS(loc_of_space != line.npos, "invalid hash file format"); std::string first_num = line.substr(0, loc_of_space); std::string second_num = line.substr(loc_of_space + 1, line.length()); bin_coefficients[i] = str_to_uint64(first_num, line_no); bin_offsets[i] = str_to_uint32(second_num, line_no); ++line_no; } std::getline(hash_file, line); uint64_t hash_table_length = str_to_uint64(line, line_no++); std::vector<uint64_t> table(hash_table_length); std::generate(table.begin(), table.end(), [&hash_file, &line_no]() { std::string line; std::getline(hash_file, line); return str_to_uint64(line, line_no++); }); std::getline(hash_file, line); result.unknown_token_id = str_to_uint32(line, line_no++); std::getline(hash_file, line); result.first_token_id = str_to_uint32(line, line_no++); std::getline(hash_file, line); result.separator_token_id = str_to_uint32(line, line_no++); // Transfer hash table to columns result.table = 
cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT64}, table.size(), cudf::mask_state::UNALLOCATED, stream, mr); CUDF_CUDA_TRY(cudaMemcpyAsync(result.table->mutable_view().data<uint64_t>(), table.data(), table.size() * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); result.bin_coefficients = cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT64}, bin_coefficients.size(), cudf::mask_state::UNALLOCATED, stream, mr); CUDF_CUDA_TRY(cudaMemcpyAsync(result.bin_coefficients->mutable_view().data<uint64_t>(), bin_coefficients.data(), bin_coefficients.size() * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); result.bin_offsets = cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT16}, bin_offsets.size(), cudf::mask_state::UNALLOCATED, stream, mr); CUDF_CUDA_TRY(cudaMemcpyAsync(result.bin_offsets->mutable_view().data<uint16_t>(), bin_offsets.data(), bin_offsets.size() * sizeof(uint16_t), cudaMemcpyDefault, stream.value())); auto cp_metadata = detail::get_codepoint_metadata(stream); auto const cp_metadata_size = static_cast<cudf::size_type>(cp_metadata.size()); result.cp_metadata = std::make_unique<cudf::column>(cudf::data_type{cudf::type_id::UINT32}, cp_metadata_size, cp_metadata.release(), rmm::device_buffer{}, 0); auto aux_cp_table = detail::get_aux_codepoint_data(stream); auto const aux_cp_table_size = static_cast<cudf::size_type>(aux_cp_table.size()); result.aux_cp_table = std::make_unique<cudf::column>(cudf::data_type{cudf::type_id::UINT64}, aux_cp_table_size, aux_cp_table.release(), rmm::device_buffer{}, 0); return std::make_unique<hashed_vocabulary>(std::move(result)); } } // namespace detail std::unique_ptr<hashed_vocabulary> load_vocabulary_file( std::string const& filename_hashed_vocabulary, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::load_vocabulary_file(filename_hashed_vocabulary, cudf::get_default_stream(), mr); } } // namespace nvtext
rapidsai_public_repos/cudf/cpp/src/text/subword/wordpiece_tokenizer.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <text/subword/detail/hash_utils.cuh> #include <text/subword/detail/tokenizer_utils.cuh> #include <text/subword/detail/wordpiece_tokenizer.hpp> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/strings/string_view.cuh> #include <cudf/utilities/error.hpp> #include <nvtext/subword_tokenize.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/copy.h> #include <thrust/distance.h> #include <thrust/execution_policy.h> #include <thrust/fill.h> #include <thrust/find.h> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/remove.h> #include <thrust/transform.h> #include <thrust/transform_scan.h> namespace nvtext { namespace detail { namespace { /** * @brief Initializes the token-ids, word-indices, and token counts vectors. * * Each thread processes a single code point from `code_points`. * This also locates the start and end of each word within the `code_points` buffer. * A word start is identified as a non-space character that appears right after a space. * A word end is identified as a space character that appears right after a non-space one. * If the code point at this thread does not represent a word start or word end, * a max uint32_t value is written to the appropriate vector instead. * A post-processing step is required to filter the relevant values in these * vectors. * * It is guaranteed that the same number of valid values will be written to both the * start and end indices and that after the select step, the two arrays will be aligned. * That is, `start_word_indices[word]` and `end_word_indices[word]` are the start and * end for the same word. * * Memory required is 13 bytes per code point value: * - 4 bytes each for `start_word_indices` and `end_word_indices` * - 4 bytes for each `token_ids` * - 1 byte for each `tokens_per_word` * Also, there is a code point value for each byte in the input strings. * * @param[in] code_points A pointer to the code points in the strings after normalization. * @param[out] start_word_indices An array of size `num_code_points` which will contain the * starting index for each word. * @param[out] end_word_indices An array of size `num_code_points` which will contain the * ending index for each word. * @param num_code_points The total number of code_points. * @param[out] token_ids An array of size `num_code_points` which will hold the token ids. * This kernel just sets all the values to max uint32_t. * @param[out] tokens_per_word An array of size `num_code_points` which holds the number of * tokens. This kernel just sets all the values to 0.
*/ __global__ void init_data_and_mark_word_start_and_ends(uint32_t const* code_points, uint32_t* start_word_indices, uint32_t* end_word_indices, size_t num_code_points, uint32_t* token_ids, uint8_t* tokens_per_word) { cudf::thread_index_type char_for_thread = static_cast<cudf::thread_index_type>(blockDim.x) * static_cast<cudf::thread_index_type>(blockIdx.x) + threadIdx.x; // Deal with the start_word_indices array if (char_for_thread < num_code_points) { uint32_t val_to_write = std::numeric_limits<uint32_t>::max(); if ((code_points[char_for_thread] != SPACE_CODE_POINT) && (char_for_thread > 0) && (code_points[char_for_thread - 1] == SPACE_CODE_POINT)) { val_to_write = char_for_thread; } start_word_indices[char_for_thread] = val_to_write; // Deal with the end_word_indices array val_to_write = std::numeric_limits<uint32_t>::max(); if ((code_points[char_for_thread] != SPACE_CODE_POINT) && (char_for_thread + 1 < num_code_points) && (code_points[char_for_thread + 1] == SPACE_CODE_POINT)) { val_to_write = char_for_thread + 1; } end_word_indices[char_for_thread] = val_to_write; token_ids[char_for_thread] = std::numeric_limits<uint32_t>::max(); tokens_per_word[char_for_thread] = 0; } } /** * @brief Resolves the string boundaries for the start and end words. * * This kernel should be called after `init_data_and_mark_word_start_and_ends` with at * least `num_strings + 1` total threads. * * The start and end indices are updated to honor the string boundaries * within the strings array. This corrects any word ranges that span across * individual strings. * * @param code_points A pointer to the code points in the strings. * @param strings_offsets An array containing the index of the starting character of each string * with an extra entry at the end containing the total number of characters. As a result, * this array is of length num_strings + 1. * @param start_word_indices An array which will contain the starting index for each word scattered * throughout. If an index does not represent a word start, the max uint32_t value is written * to indicate this. * @param end_word_indices An array which will contain the one-past-the-end index for each word * scattered throughout. If an index does not represent a word end, the max uint32_t value is * written to indicate this. * @param num_strings The total number of strings to be processed. */ __global__ void mark_string_start_and_ends(uint32_t const* code_points, cudf::size_type const* strings_offsets, uint32_t* start_word_indices, uint32_t* end_word_indices, uint32_t num_strings) { cudf::thread_index_type idx = static_cast<cudf::thread_index_type>(blockDim.x) * static_cast<cudf::thread_index_type>(blockIdx.x) + threadIdx.x; // Ensure the starting character of each string is written to the word start array. if (idx <= num_strings) { auto const offset = strings_offsets[idx]; if ((idx < num_strings) && (code_points[offset] != SPACE_CODE_POINT)) { start_word_indices[offset] = offset; } if ((offset > 0) && (code_points[offset - 1] != SPACE_CODE_POINT)) { end_word_indices[offset - 1] = offset; } } } /** * @brief Currently supported special tokens. * * Code logic expects these to be 3 or 4 upper-case characters, each * followed by a single trailing space.
*/ __constant__ char special_tokens[35]{"BOS EOS UNK SEP PAD CLS MASK "}; constexpr cudf::size_type MIN_ST_WIDTH = 4; // Min token size in special_tokens constexpr cudf::size_type MAX_ST_WIDTH = 5; // Max token size in special_tokens struct mark_special_tokens { /** * @brief Check the given code-point array against the list of known * special tokens. */ __device__ bool is_special_token(uint32_t const* token, cudf::size_type size) const { if (size < MIN_ST_WIDTH || size > MAX_ST_WIDTH) return false; char str_token[MAX_ST_WIDTH]; // convert code-points to chars thrust::transform(thrust::seq, token, token + size, str_token, [](uint32_t cp) { // also upper-case them to match against the special_tokens array return static_cast<char>(cp >= 'a' ? cp - 'a' + 'A' : cp); }); // search the special tokens array for the str_token cudf::string_view tokens(special_tokens, sizeof(special_tokens)); return tokens.find(str_token, size) != cudf::string_view::npos; } /** * @brief Check code-points for special tokens and adjust indices. * * Tokens will appear in the `code_points` array as: * `_[_ttt_]_` where `_` are single space characters and * ttt is the variable-length token name * * The logic below uses the following variables to represent position * values in the `code_points` array after locating a special token: * ``` * _ [ _ t t t _ ] _ * ^ ^ ^ ^ * si sp ep ei * ``` * where `si` is `start_index` * `sp` is `start_pos` * `ep` is `end_pos` * `ei` is `end_index` * * When a special token is found, the `code_points` are adjusted * to remove the spaces and capitalize the name. * ``` * _ [ _ t t t _ ] _ is updated to * _ [ T T T ] _ ] _ * ``` * This is required for the downstream word-piece tokenizer to * match it to the vocabulary hash table. * * The `start_word_indices` and `end_word_indices` are updated to * identify the token and to ignore the extra trailing `]` character. */ __device__ void operator()(size_t idx) const { uint32_t const start_index = start_word_indices[idx]; if ((start_index == std::numeric_limits<uint32_t>::max()) || ((start_index + MIN_ST_WIDTH + 2) > num_code_points)) return; if (code_points[start_index] != '[') return; // check for matching end bracket uint32_t const start_pos = start_index + 2; // after the space delimiter // search for next start-word and then check it is a ']' uint32_t const end_index = [&] { auto const begin = start_word_indices + start_pos; auto const width = std::min(static_cast<size_t>(MAX_ST_WIDTH + 1), (num_code_points - start_pos)); auto const end = begin + width; // checking the next start-word is more reliable than arbitrarily searching for ']' // in case the text is split across string rows auto const iter = thrust::find_if(thrust::seq, begin + 1, end, [](auto swi) { return swi != std::numeric_limits<uint32_t>::max(); }); return iter == end ? start_index : static_cast<uint32_t>(iter - start_word_indices); }(); if (code_points[end_index] != ']') return; // check for special token auto const size = static_cast<cudf::size_type>(end_index - start_pos); if (!is_special_token(code_points + start_pos, size)) return; // special token found // adjust code-points auto const end_pos = end_index - 2; // change _[_ttt_]_ to _[TTT]_ for (auto left_idx = start_pos - 1; left_idx <= end_pos; ++left_idx) { auto const cp = code_points[left_idx + 1]; code_points[left_idx] = cp >= 'a' ?
cp - 'a' + 'A' : cp; } code_points[end_pos] = ']'; // erase the intermediate indices thrust::fill(thrust::seq, start_word_indices + start_index + 1, // keep the first one start_word_indices + end_index + 1, std::numeric_limits<uint32_t>::max()); thrust::fill(thrust::seq, end_word_indices + start_index, end_word_indices + end_index + 1, std::numeric_limits<uint32_t>::max()); // reset the new end-word index end_word_indices[end_pos] = end_pos + 1; } uint32_t* const code_points; uint32_t* const start_word_indices; uint32_t* const end_word_indices; size_t const num_code_points; }; /** * @brief Converts words into token ids. * * Each thread is assigned a word to convert based on the `hash_table`. Each thread converts * its word and writes the number of tokens it found in the `tokens_per_word` array. * * The `tokens_per_word` array is kept to the length `num_code_points + 1`. This means each thread * can write its number of tokens to the `tokens_per_word` corresponding to the starting * character of each word. Since strings must start at some word, we can prefix sum this array * and use the strings_lengths code point offsets to directly index the number of tokens in each * string. * * The `token_ids` array should be initialized to the max uint32_t before calling this kernel. * * @param code_points An array containing all of the code points to be processed * @param hash_table An array containing the flattened hash table with key, value pairs * packed in 64-bits * @param bin_coefficients A pointer to the GPU pointer containing the hashing parameters for * each hash bin on the GPU. * @param bin_offsets A pointer to the GPU pointer containing the start index of each bin in * the flattened hash table. * @param token_ids The index for each token found during tokenization. This is of length * num_code_points. In most cases, multiple characters will collapse to one token. In these * cases, the max uint32_t will be in place. A later step filters out these * invalid ids. * @param word_starts An array of length `num_code_points`. The first total_words elements contain * the index of the first character for each word. * @param word_ends An array of length num_code_points. The first total_words elements contain the * past-the-end index for each word. This array is kept aligned with the initial * token_ids array containing the word start code points. * `word_ends[word] - filtered_start_indices[word] = word_length` * @param tokens_per_word An array of size num_code_points that will contain the number of tokens in * each word in a string. This array can be exclusive summed and the result used in * conjunction with the strings lengths array to find the tokens in each string. This is * possible since the number of tokens in each word will be placed at the index corresponding * to the start character of a word. If we assume prefix_summed is the prefix sum of the * tokens_per_word array, then `prefix_summed[strings_lengths[string_idx] - 1]` is the number * of tokens found before the start of the string. * @param unk_token_id The token id to be placed for unknown tokens * @param max_word_length The maximum length of a word. Any word longer than this length is * replaced by the unknown token.
* @param total_words The total number of whitespace-separated words * @param outer_hash_a_param The a parameter for the outer hash * @param outer_hash_b_param The b parameter for the outer hash * @param num_outer_bins The number of bins for the outer hash */ __global__ void kernel_wordpiece_tokenizer(uint32_t const* code_points, uint64_t const* hash_table, uint64_t const* bin_coefficients, uint16_t const* bin_offsets, uint16_t unk_token_id, uint32_t outer_hash_a_param, uint32_t outer_hash_b_param, uint16_t num_outer_bins, uint32_t const* word_starts, uint32_t const* word_ends, uint32_t max_word_length, uint32_t total_words, uint32_t* token_ids, uint8_t* tokens_per_word) { cudf::thread_index_type word_to_tokenize = static_cast<cudf::thread_index_type>(blockDim.x) * static_cast<cudf::thread_index_type>(blockIdx.x) + threadIdx.x; if (word_to_tokenize >= total_words) return; // Each thread gets the start code_point offset for each word and resets the token_id memory to // the default value. In a post-processing step, all of these values will be removed. auto const token_start = word_starts[word_to_tokenize]; auto const token_end = word_ends[word_to_tokenize]; auto const word_length = token_end - token_start; // The sdbm hash of "##" constexpr uint32_t hashtag_hash = 2296000; uint16_t num_values_tokenized = 0; // initialize start, end uint32_t start = token_start; uint32_t end = token_end; if (word_length > max_word_length) { start = token_end; num_values_tokenized = 1; token_ids[token_start] = unk_token_id; tokens_per_word[token_start] = num_values_tokenized; } while (start < token_end) { end = token_end; // init token_id to no token int token_id = -1; // compute current length uint32_t const length = token_end - start; uint64_t substr_hash = sdbm_hash(code_points + start, length, start == token_start ? 0 : hashtag_hash); while (start < end) { token_id = retrieve(substr_hash, outer_hash_a_param, outer_hash_b_param, num_outer_bins, hash_table, bin_coefficients, bin_offsets); if (token_id != -1) { break; } --end; // Pop off the last value from the substr hash substr_hash = prev_sdbm_hash(substr_hash, code_points[end]); } if (token_id == -1) { end = token_end; token_id = unk_token_id; // We need to clean up the global array. This case is very uncommon. // Only 0.016% of words cannot be resolved to a token from the SQuAD dev set.
for (uint32_t i = 1; i < num_values_tokenized; ++i) { token_ids[token_start + i] = std::numeric_limits<uint32_t>::max(); } num_values_tokenized = 0; } token_ids[token_start + num_values_tokenized] = token_id; ++num_values_tokenized; start = end; } tokens_per_word[token_start] = num_values_tokenized; } } // namespace wordpiece_tokenizer::wordpiece_tokenizer(hashed_vocabulary const& vocab_table, uint32_t max_sequence_length, uint32_t stride, bool do_truncate, bool do_lower_case, uint32_t max_word_length) : vocab_table(vocab_table), normalizer(vocab_table.cp_metadata->view().data<codepoint_metadata_type>(), vocab_table.aux_cp_table->view().data<aux_codepoint_data_type>(), do_lower_case), max_sequence_length{max_sequence_length}, stride(stride), do_truncate(do_truncate), max_word_length{max_word_length} { } uvector_pair wordpiece_tokenizer::tokenize(char const* d_strings, cudf::size_type const* d_offsets, cudf::size_type num_strings, rmm::cuda_stream_view stream) { auto cps_and_offsets = normalizer.normalize(d_strings, d_offsets, num_strings, stream); tokenize(cps_and_offsets, stream); return uvector_pair(std::move(cps_and_offsets.first), std::move(cps_and_offsets.second)); } struct copy_if_fn { // inline lambda not allowed in private or protected member function __device__ bool operator()(uint32_t cp) { return cp != std::numeric_limits<uint32_t>::max(); } }; struct tranform_fn { // just converting uint8 value to uint32 __device__ uint32_t operator()(uint8_t count) { return count; } }; void wordpiece_tokenizer::tokenize(uvector_pair& cps_and_offsets, rmm::cuda_stream_view stream) { auto device_code_points = cps_and_offsets.first->data(); auto const num_code_points = cps_and_offsets.first->size(); auto device_strings_offsets = cps_and_offsets.second->data(); auto const num_strings = cps_and_offsets.second->size() - 1; size_t const four_byte_cp_chunks = 1 + (num_code_points - 1) / sizeof(uint32_t); size_t const rounded_num_cps = sizeof(uint32_t) * four_byte_cp_chunks; rmm::device_uvector<uint8_t> device_tokens_per_word(rounded_num_cps, stream); rmm::device_uvector<uint32_t> device_token_ids(num_code_points, stream); rmm::device_uvector<uint32_t> device_word_indices(2 * num_code_points, stream); // make device_start_word_indices and device_end_word_indices contiguous uint32_t* device_start_word_indices = device_word_indices.data(); uint32_t* device_end_word_indices = device_start_word_indices + num_code_points; cudf::detail::grid_1d const grid_init{static_cast<cudf::size_type>(num_code_points), THREADS_PER_BLOCK}; detail::init_data_and_mark_word_start_and_ends<<<grid_init.num_blocks, grid_init.num_threads_per_block, 0, stream.value()>>>(device_code_points, device_start_word_indices, device_end_word_indices, num_code_points, device_token_ids.data(), device_tokens_per_word.data()); CUDF_CHECK_CUDA(stream.value()); cudf::detail::grid_1d const grid_mark{static_cast<cudf::size_type>(num_strings + 1), THREADS_PER_BLOCK}; detail::mark_string_start_and_ends<<<grid_mark.num_blocks, grid_mark.num_threads_per_block, 0, stream.value()>>>(device_code_points, device_strings_offsets, device_start_word_indices, device_end_word_indices, num_strings); CUDF_CHECK_CUDA(stream.value()); // check for special tokens and adjust indices thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<size_t>(0), num_code_points, mark_special_tokens{ device_code_points, device_start_word_indices, device_end_word_indices, num_code_points}); // Now start_word_indices has the word starts scattered throughout the 
array. We need to select // all values not equal to the max uint32_t and place them at the start of the array. We leverage // the fact that the start_word_indices and the end_word indices are contiguous to only launch one // device select kernel. auto itr_end = thrust::remove(rmm::exec_policy(stream), device_word_indices.begin(), device_word_indices.end(), std::numeric_limits<uint32_t>::max()); // The number of tokens selected will be double the number of words since we // select from both the start and end index arrays. uint32_t const num_words = thrust::distance(device_word_indices.begin(), itr_end) / 2; // We need to change the end_word_indices pointer after the selection is complete device_end_word_indices = device_start_word_indices + num_words; if (num_words > 0) { cudf::detail::grid_1d const grid{static_cast<cudf::size_type>(num_words), THREADS_PER_BLOCK}; detail::kernel_wordpiece_tokenizer<<<grid.num_blocks, grid.num_threads_per_block, 0, stream.value()>>>( device_code_points, vocab_table.table->view().data<uint64_t>(), vocab_table.bin_coefficients->view().data<uint64_t>(), vocab_table.bin_offsets->view().data<uint16_t>(), vocab_table.unknown_token_id, vocab_table.outer_hash_a, vocab_table.outer_hash_b, vocab_table.num_bins, device_start_word_indices, device_end_word_indices, max_word_length, num_words, device_token_ids.data(), device_tokens_per_word.data()); CUDF_CHECK_CUDA(stream.value()); } // Repurpose the input array for the token ids. In the worst case, each code point ends up being a // token so this will always have enough memory to store the contiguous tokens. uint32_t* contiguous_token_ids = device_code_points; auto const copy_size = // thrust::copy_if limited to copying int-max values std::min(device_token_ids.size(), static_cast<std::size_t>(std::numeric_limits<int>::max())); auto ids_itr = device_token_ids.begin(); auto const ids_end = device_token_ids.end(); while (ids_itr != ids_end) { auto const copy_end = (static_cast<std::size_t>(std::distance(ids_itr, ids_end)) <= copy_size) ? ids_end : ids_itr + copy_size; contiguous_token_ids = thrust::copy_if( rmm::exec_policy(stream), ids_itr, copy_end, contiguous_token_ids, copy_if_fn{}); ids_itr = copy_end; } // Repurpose start word indices since it is the same size and type as the required output. uint32_t* token_id_counts = device_start_word_indices; thrust::transform_inclusive_scan(rmm::exec_policy(stream), device_tokens_per_word.data(), device_tokens_per_word.data() + num_code_points, token_id_counts, tranform_fn{}, thrust::plus<uint32_t>()); // Update the device_strings_offsets using the token_id_counts thrust::for_each_n(rmm::exec_policy(stream), thrust::make_counting_iterator<uint32_t>(1), num_strings, update_strings_lengths_fn{token_id_counts, device_strings_offsets}); } } // namespace detail } // namespace nvtext
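The single-remove compaction used in tokenize() above is worth isolating: because the start and end indices sit in one contiguous buffer and share the same max-uint32 sentinel, one pass compacts both halves while keeping them aligned. A standalone sketch of that pattern (hypothetical helper, not library code):

// Sketch of the sentinel-compaction step (hypothetical helper, not library code):
#include <rmm/cuda_stream_view.hpp>
#include <rmm/device_uvector.hpp>
#include <rmm/exec_policy.hpp>
#include <thrust/distance.h>
#include <thrust/remove.h>
#include <limits>

// Returns the number of words; on return the first num_words entries of word_indices
// hold the compacted start indices and the next num_words entries hold the end indices.
uint32_t compact_word_indices(rmm::device_uvector<uint32_t>& word_indices,
                              rmm::cuda_stream_view stream)
{
  auto const itr_end = thrust::remove(rmm::exec_policy(stream),
                                      word_indices.begin(),
                                      word_indices.end(),
                                      std::numeric_limits<uint32_t>::max());
  // Two surviving entries per word: one start and one end.
  return thrust::distance(word_indices.begin(), itr_end) / 2;
}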
rapidsai_public_repos/cudf/cpp/src/text/subword/data_normalizer.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <text/subword/detail/data_normalizer.hpp> #include <text/subword/detail/tokenizer_utils.cuh> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/strings/detail/utilities.cuh> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/for_each.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/pair.h> #include <thrust/remove.h> #include <thrust/scan.h> #include <thrust/transform.h> namespace nvtext { namespace detail { namespace { /** * @brief Bit used to filter out invalid code points. * * When normalizing characters to code point values, if this bit is set, * the code point should be filtered out before returning from the normalizer. */ constexpr uint32_t FILTER_BIT = 22; /** * @brief Retrieve new code point from metadata value. * * @param metadata Value from the codepoint_metadata table. * @return The replacement character if appropriate. */ __device__ uint32_t get_first_cp(uint32_t metadata) { return metadata & NEW_CP_MASK; } /** * @brief Retrieve token category from the metadata value. * * Category values are 0-5: * 0 - character should be padded * 1 - pad character if lower-case * 2 - character should be removed * 3 - remove character if lower-case * 4 - whitespace character -- always replace * 5 - uncategorized * * @param metadata Value from the codepoint_metadata table. * @return Category value. */ __device__ uint32_t extract_token_cat(uint32_t metadata) { return (metadata >> TOKEN_CAT_SHIFT) & TOKEN_CAT_MASK; } /** * @brief Return true if category of metadata value specifies the character should be removed. */ __device__ bool should_remove_cp(uint32_t metadata, bool lower_case) { auto const cat = extract_token_cat(metadata); return (cat == TOKEN_CAT_REMOVE_CHAR) || (lower_case && (cat == TOKEN_CAT_REMOVE_CHAR_IF_LOWER)); } /** * @brief Return true if category of metadata value specifies the character should be padded. */ __device__ bool should_add_spaces(uint32_t metadata, bool lower_case) { auto const cat = extract_token_cat(metadata); return (cat == TOKEN_CAT_ADD_SPACE) || (lower_case && (cat == TOKEN_CAT_ADD_SPACE_IF_LOWER)); } /** * @brief Return true if category of metadata value specifies the character should always be replaced. */ __device__ bool always_replace(uint32_t metadata) { return extract_token_cat(metadata) == TOKEN_CAT_ALWAYS_REPLACE; } /** * @brief Returns true if metadata value includes a multi-character transform bit equal to 1. */ __device__ bool is_multi_char_transform(uint32_t metadata) { return (metadata >> MULTICHAR_SHIFT) & MULTICHAR_MASK; } /** * @brief Returns true if the byte passed in could be a valid head byte for * a utf8 character. That is, not binary `10xxxxxx` */ __device__ bool is_head_byte(unsigned char utf8_byte) { return (utf8_byte >> 6) != 2; } /** * @brief Converts a UTF-8 character into a unicode code point value.
* * If the byte at start_byte_for_thread is the first byte of a UTF-8 character (head byte), * the UTF-8 character is converted to a unicode code point and returned. * * If the byte at start_byte_for_thread is not a head byte, 0 is returned. * * All threads start reading bytes from the pointer denoted by strings. * * @param strings A pointer to the start of the sequence of characters to be analyzed. * @param total_bytes Total number of bytes in the `strings` buffer. * @param start_byte_for_thread Which byte to start analyzing * @return New code point value for this byte. */ __device__ uint32_t extract_code_points_from_utf8(unsigned char const* strings, size_t const total_bytes, cudf::thread_index_type const start_byte_for_thread) { constexpr uint8_t max_utf8_blocks_for_char = 4; uint8_t utf8_blocks[max_utf8_blocks_for_char] = {0}; for (int i = 0; i < std::min(static_cast<size_t>(max_utf8_blocks_for_char), total_bytes - start_byte_for_thread); ++i) { utf8_blocks[i] = strings[start_byte_for_thread + i]; } uint8_t const length_encoding_bits = utf8_blocks[0] >> 3; // UTF-8 format is a variable-width character encoding using up to 4 bytes. // If the first byte is: // - [x00-x7F] -- beginning of a 1-byte character (ASCII) // - [xC0-xDF] -- beginning of a 2-byte character // - [xE0-xEF] -- beginning of a 3-byte character // - [xF0-xF7] -- beginning of a 4-byte character // Anything else is an intermediate byte [x80-xBF]. // So shifted by 3 bits this becomes // - [x00-x0F] or leb < 16 // - [x18-x1B] or 24 <= leb <= 27 // - [x1C-x1D] or 28 <= leb <= 29 // - [x1E-x1F] or leb >= 30 // The remaining bits are part of the value as specified by the mask // specified by x's below. // - b0xxxxxxx = x7F // - b110xxxxx = x1F // - b1110xxxx = x0F // - b11110xxx = x07 using encoding_length_pair = thrust::pair<uint8_t, uint8_t>; // Set the number of characters and the top masks based on the length encoding bits. encoding_length_pair const char_encoding_length = [length_encoding_bits] { if (length_encoding_bits < 16) return encoding_length_pair{1, 0x7F}; if (length_encoding_bits >= 24 && length_encoding_bits <= 27) return encoding_length_pair{2, 0x1F}; if (length_encoding_bits == 28 || length_encoding_bits == 29) return encoding_length_pair{3, 0x0F}; if (length_encoding_bits == 30) return encoding_length_pair{4, 0x07}; return encoding_length_pair{0, 0}; }(); // Now pack up the bits into a uint32_t. // Move the first set of values into bits 19-24 in the 32-bit value. uint32_t code_point = (utf8_blocks[0] & char_encoding_length.second) << 18; // Move the remaining values which are 6 bits (mask b10xxxxxx = x3F) // from the remaining bytes into successive positions in the 32-bit result. code_point |= ((utf8_blocks[1] & 0x3F) << 12); code_point |= ((utf8_blocks[2] & 0x3F) << 6); code_point |= utf8_blocks[3] & 0x3F; // Adjust the final result by shifting by the character length. uint8_t const shift_amt = 24 - 6 * char_encoding_length.first; code_point >>= shift_amt; return code_point; } /** * @brief Normalize the characters for the strings input. * * Characters are replaced, padded, or removed depending on the `do_lower_case` input * as well as the metadata values for each code point found in `cp_metadata`. * * First, each character is converted from UTF-8 to a unicode code point value. * This value is then looked up in the `cp_metadata` table to determine its fate. * The end result is a set of code point values for each character. * The normalized set of characters makes it easier for the tokenizer to identify * tokens and match up token ids.
* * @param[in] strings The input strings with characters to normalize to code point values. * @param[in] total_bytes Total number of bytes in the input `strings` vector. * @param[in] cp_metadata The metadata lookup table for every unicode code point value. * @param[in] aux_table Aux table for mapping some multi-byte code point values. * @param[in] do_lower_case True if normalization should include lower-casing. * @param[out] code_points The resulting code point values from normalization. * @param[out] chars_per_thread Output number of code point values per string. */ __global__ void kernel_data_normalizer(unsigned char const* strings, size_t const total_bytes, uint32_t const* cp_metadata, uint64_t const* aux_table, bool const do_lower_case, uint32_t* code_points, uint32_t* chars_per_thread) { constexpr uint32_t init_val = (1 << FILTER_BIT); uint32_t replacement_code_points[MAX_NEW_CHARS] = {init_val, init_val, init_val}; cudf::thread_index_type const char_for_thread = threadIdx.x + cudf::thread_index_type(blockIdx.x) * cudf::thread_index_type(blockDim.x); uint32_t num_new_chars = 0; if (char_for_thread < total_bytes) { auto const code_point = extract_code_points_from_utf8(strings, total_bytes, char_for_thread); auto const metadata = cp_metadata[code_point]; if (is_head_byte(strings[char_for_thread]) && !should_remove_cp(metadata, do_lower_case)) { num_new_chars = 1; // Apply lower cases and accent stripping if necessary auto const new_cp = do_lower_case || always_replace(metadata) ? get_first_cp(metadata) : code_point; replacement_code_points[0] = new_cp == 0 ? code_point : new_cp; if (do_lower_case && is_multi_char_transform(metadata)) { auto const next_cps = aux_table[code_point]; replacement_code_points[1] = static_cast<uint32_t>(next_cps >> 32); auto const potential_next_cp = static_cast<uint32_t>(next_cps); replacement_code_points[2] = potential_next_cp != 0 ? potential_next_cp : replacement_code_points[2]; num_new_chars = 2 + (potential_next_cp != 0); } if (should_add_spaces(metadata, do_lower_case)) { // Need to shift all existing code-points up one // This is a rotate right. There is no thrust equivalent at this time. for (int loc = num_new_chars; loc > 0; --loc) { replacement_code_points[loc] = replacement_code_points[loc - 1]; } // Write the required spaces at the end replacement_code_points[0] = SPACE_CODE_POINT; replacement_code_points[num_new_chars + 1] = SPACE_CODE_POINT; num_new_chars += 2; } } } chars_per_thread[char_for_thread] = num_new_chars; using BlockStore = cub::BlockStore<uint32_t, THREADS_PER_BLOCK, MAX_NEW_CHARS, cub::BLOCK_STORE_WARP_TRANSPOSE>; __shared__ typename BlockStore::TempStorage temp_storage; // Now we perform coalesced writes back to global memory using cub. 
uint32_t* block_base = code_points + blockIdx.x * blockDim.x * MAX_NEW_CHARS; BlockStore(temp_storage).Store(block_base, replacement_code_points); } } // namespace data_normalizer::data_normalizer(codepoint_metadata_type const* cp_metadata, aux_codepoint_data_type const* aux_table, bool do_lower_case) : d_cp_metadata{cp_metadata}, d_aux_table{aux_table}, do_lower_case{do_lower_case} { } uvector_pair data_normalizer::normalize(char const* d_strings, cudf::size_type const* d_offsets, cudf::size_type num_strings, rmm::cuda_stream_view stream) const { if (num_strings == 0) { return uvector_pair{std::make_unique<rmm::device_uvector<uint32_t>>(0, stream), std::make_unique<rmm::device_uvector<cudf::size_type>>(0, stream)}; } // copy offsets to working memory auto const num_offsets = num_strings + 1; auto d_strings_offsets = std::make_unique<rmm::device_uvector<cudf::size_type>>(num_offsets, stream); thrust::transform(rmm::exec_policy(stream), thrust::counting_iterator<cudf::size_type>(0), thrust::counting_iterator<cudf::size_type>(num_offsets), d_strings_offsets->begin(), [d_offsets] __device__(auto idx) { auto const offset = d_offsets[0]; // adjust for any offset to the offsets return d_offsets[idx] - offset; }); auto const bytes_count = d_strings_offsets->element(num_strings, stream); if (bytes_count == 0) { // if no bytes, nothing to do return uvector_pair{std::make_unique<rmm::device_uvector<uint32_t>>(0, stream), std::make_unique<rmm::device_uvector<cudf::size_type>>(0, stream)}; } cudf::detail::grid_1d const grid{bytes_count, THREADS_PER_BLOCK, 1}; size_t const threads_on_device = grid.num_threads_per_block * grid.num_blocks; size_t const max_new_char_total = MAX_NEW_CHARS * threads_on_device; auto d_code_points = std::make_unique<rmm::device_uvector<uint32_t>>(max_new_char_total, stream); rmm::device_uvector<uint32_t> d_chars_per_thread(threads_on_device, stream); kernel_data_normalizer<<<grid.num_blocks, grid.num_threads_per_block, 0, stream.value()>>>( reinterpret_cast<unsigned char const*>(d_strings), bytes_count, d_cp_metadata, d_aux_table, do_lower_case, d_code_points->data(), d_chars_per_thread.data()); // Remove the 'empty' code points from the vector thrust::remove(rmm::exec_policy(stream), d_code_points->begin(), d_code_points->end(), uint32_t{1 << FILTER_BIT}); // We also need to prefix sum the number of characters up to an including // the current character in order to get the new strings lengths. thrust::inclusive_scan(rmm::exec_policy(stream), d_chars_per_thread.begin(), d_chars_per_thread.end(), d_chars_per_thread.begin()); // This will reset the offsets to the new generated code point values thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<uint32_t>(1), num_strings, update_strings_lengths_fn{d_chars_per_thread.data(), d_strings_offsets->data()}); auto const num_chars = d_strings_offsets->element(num_strings, stream); d_code_points->resize(num_chars, stream); // should be smaller than original allocated size // return the normalized code points and the new offsets return uvector_pair(std::move(d_code_points), std::move(d_strings_offsets)); } } // namespace detail } // namespace nvtext
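The length-encoding-bits classification in extract_code_points_from_utf8 above is easy to sanity-check on the host. A sketch under the assumption of well-formed UTF-8 input (this mirrors, not replaces, the device code):

// Host-side mirror of the UTF-8 head-byte classification (sketch):
#include <cstdint>
#include <utility>

// Returns {character byte length, value mask for the head byte};
// continuation bytes (10xxxxxx) return {0, 0}.
std::pair<int, uint8_t> utf8_head_byte_info(uint8_t byte)
{
  uint8_t const leb = byte >> 3;  // length encoding bits
  if (leb < 16) return {1, 0x7F};                // 0xxxxxxx (ASCII)
  if (leb >= 24 && leb <= 27) return {2, 0x1F};  // 110xxxxx
  if (leb == 28 || leb == 29) return {3, 0x0F};  // 1110xxxx
  if (leb == 30) return {4, 0x07};               // 11110xxx
  return {0, 0};                                 // 10xxxxxx continuation
}
// Example: 0xE2, the first byte of U+20AC (encoded E2 82 AC), yields {3, 0x0F}.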
rapidsai_public_repos/cudf/cpp/src/text/subword/subword_tokenize.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/get_value.cuh> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/sequence.hpp> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/scalar/scalar.hpp> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <nvtext/detail/load_hash_file.hpp> #include <nvtext/subword_tokenize.hpp> #include <text/subword/detail/wordpiece_tokenizer.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/tabulate.h> #include <thrust/transform_scan.h> namespace nvtext { namespace detail { namespace { /** * @brief Convert tokens and row2tensor map to final tensor data. * * @param[in] token_ids Tokens from tokenizer * @param[in] offsets Offsets to each string's output row of tokens * @param[in] row2tensor String to tensor token counts * @param[in] row2row_within_tensor Token counts within sub-rows of the output * @param[in] max_sequence_length Maximum number of tokens in a row * @param[in] nrows_tensor_token_ids Total number of output tensor rows * @param[in] stride Number of tokens in sub-rows * @param[in] do_truncate True if tokens should not spill into sub-rows in the output * @param[out] final_tensor Output vector of token-ids * @param[out] attn_mask Identifies valid token id entries * @param[out] metadata Additional data per row */ __global__ void kernel_compute_tensor_metadata( // input uint32_t const* token_ids, cudf::size_type const* offsets, uint32_t const* row2tensor, uint32_t const* row2row_within_tensor, uint32_t max_sequence_length, uint32_t nrows_tensor_token_ids, uint32_t stride, bool do_truncate, // output uint32_t* final_tensor, uint32_t* attn_mask, uint32_t* metadata) { cudf::thread_index_type const output_idx = threadIdx.x + static_cast<cudf::thread_index_type>(blockIdx.x) * static_cast<cudf::thread_index_type>(blockDim.x); if (output_idx >= (static_cast<cudf::thread_index_type>(nrows_tensor_token_ids) * static_cast<cudf::thread_index_type>(max_sequence_length))) { return; } uint32_t const absolute_row_id = output_idx / max_sequence_length; uint32_t const tensor_id = row2tensor[absolute_row_id]; uint32_t const row_within_tensor = row2row_within_tensor[absolute_row_id]; uint32_t const offset_token_ids_tensor = offsets[tensor_id]; uint32_t const n_tokens_tensor = offsets[tensor_id + 1] - offset_token_ids_tensor; // check for last row within tensor bool const last_row_of_tensor = (absolute_row_id == nrows_tensor_token_ids - 1) || (row2tensor[absolute_row_id + 1] != tensor_id); // compute input offset to retrieve token ids uint32_t const token_idx = output_idx % max_sequence_length; uint32_t const row_offset_token_ids = offset_token_ids_tensor + token_idx + (row_within_tensor ? 
(max_sequence_length + (stride * (row_within_tensor - 1))) : 0); if (row_within_tensor == 0) { if (token_idx < n_tokens_tensor) { // copy token ids final_tensor[output_idx] = token_ids[row_offset_token_ids]; attn_mask[output_idx] = 1; } else { // pad with 0 final_tensor[output_idx] = 0; attn_mask[output_idx] = 0; } } else { uint32_t const n_replicates = max_sequence_length - stride; if ((row_offset_token_ids - n_replicates) < (offset_token_ids_tensor + n_tokens_tensor)) { // replicate elements from previous row or copy new tokens final_tensor[output_idx] = token_ids[row_offset_token_ids - n_replicates]; attn_mask[output_idx] = 1; } else { // pad with 0 final_tensor[output_idx] = 0; attn_mask[output_idx] = 0; } } // write metadata if (token_idx == 0) { auto const metadata_idx = absolute_row_id * 3; // three metadata values per output row metadata[metadata_idx] = tensor_id; metadata[metadata_idx + 1] = (row_within_tensor == 0) ? 0 : (max_sequence_length - stride) / 2; metadata[metadata_idx + 2] = [&] { if (!last_row_of_tensor) return max_sequence_length - (max_sequence_length - stride) / 2 - 1; if (n_tokens_tensor <= max_sequence_length) // we fit, all good return (n_tokens_tensor > 0) ? (n_tokens_tensor - 1) : 0; if (do_truncate) return (max_sequence_length - 1); auto const final_row_value = (max_sequence_length - stride) + (n_tokens_tensor - max_sequence_length) % stride; return (final_row_value > 0) ? (final_row_value - 1) : 0; }(); } } // this happens if there are no tokens in the input tokenizer_result build_empty_result(cudf::size_type size, uint32_t max_sequence_length, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto zero = cudf::numeric_scalar<uint32_t>(0, true, stream); auto ids = cudf::detail::sequence(size * max_sequence_length, zero, zero, stream, mr); auto mask = cudf::detail::sequence(size * max_sequence_length, zero, zero, stream, mr); auto metadata = cudf::make_numeric_column( cudf::data_type{cudf::type_id::UINT32}, size * 3, cudf::mask_state::UNALLOCATED, stream, mr); thrust::tabulate(rmm::exec_policy(stream), metadata->mutable_view().begin<uint32_t>(), metadata->mutable_view().end<uint32_t>(), [] __device__(auto idx) { return ((idx % 3) == 0) ? 
idx : 0; }); metadata->set_null_count(0); return tokenizer_result{ 0, max_sequence_length, std::move(ids), std::move(mask), std::move(metadata)}; } } // namespace tokenizer_result subword_tokenize(cudf::strings_column_view const& strings, hashed_vocabulary const& vocab_table, uint32_t max_sequence_length, uint32_t stride, bool do_lower_case, bool do_truncate, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_EXPECTS(stride <= max_sequence_length, "stride must be less than or equal to max_sequence_length"); auto const strings_count = strings.size(); if (strings_count == strings.null_count()) { // empty or all-null returns empty return tokenizer_result{0, max_sequence_length, cudf::make_empty_column(cudf::data_type{cudf::type_id::UINT32}), cudf::make_empty_column(cudf::data_type{cudf::type_id::UINT32}), cudf::make_empty_column(cudf::data_type{cudf::type_id::UINT32})}; } CUDF_EXPECTS( max_sequence_length <= (static_cast<std::size_t>(std::numeric_limits<cudf::size_type>::max()) / strings_count), "max_sequence_length times number of input rows exceeds the column size limit", std::overflow_error); auto const offsets = strings.offsets(); auto const d_offsets = offsets.data<cudf::size_type>() + strings.offset(); auto const offset = cudf::detail::get_value<cudf::size_type>(offsets, strings.offset(), stream); auto const d_chars = strings.chars().data<char>() + offset; // Create tokenizer wordpiece_tokenizer tokenizer( vocab_table, max_sequence_length, stride, do_truncate, do_lower_case); // Run tokenizer auto const tokens = tokenizer.tokenize(d_chars, d_offsets, strings_count, stream); // assign output components auto device_token_ids = tokens.first->data(); auto device_offsets = tokens.second->data(); // Format output from tokenizer // Each string can create 1 or more tensor entries. // Compute the string-per-tensor offsets values by scanning // over the number of tokens for each string. rmm::device_uvector<uint32_t> offsets_per_tensor(strings_count + 1, stream); auto d_offsets_per_tensor = offsets_per_tensor.data(); thrust::transform_exclusive_scan( rmm::exec_policy(stream), thrust::make_counting_iterator<cudf::size_type>(0), thrust::make_counting_iterator<cudf::size_type>(strings_count + 1), offsets_per_tensor.begin(), [device_offsets, do_truncate, max_sequence_length, stride, strings_count] __device__( cudf::size_type idx) { uint32_t const num_tokens = idx < strings_count ? 
device_offsets[idx + 1] - device_offsets[idx] : 0; if (do_truncate || num_tokens <= max_sequence_length) return uint32_t{1}; return 1 + ((num_tokens - max_sequence_length + stride - 1) / stride); }, uint32_t{0}, thrust::plus<uint32_t>()); // last element is the total number of output rows uint32_t const nrows_tensor_token_ids = offsets_per_tensor.element(strings_count, stream); // if there are no tokens at all, build a specific empty result if (nrows_tensor_token_ids == 0) { return build_empty_result(strings_count, max_sequence_length, stream, mr); } // compute global_row to tensor, and global_row to within_tensor_row correspondence rmm::device_uvector<uint32_t> row2tensor(nrows_tensor_token_ids, stream); auto d_row2tensor = row2tensor.data(); rmm::device_uvector<uint32_t> row2row_within_tensor(nrows_tensor_token_ids, stream); auto d_row2row_within_tensor = row2row_within_tensor.data(); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<uint32_t>(0), strings_count, [d_offsets_per_tensor, d_row2tensor, d_row2row_within_tensor] __device__(auto idx) { uint32_t offset = d_offsets_per_tensor[idx]; uint32_t nrows = d_offsets_per_tensor[idx + 1] - offset; for (uint32_t jdx = 0; jdx < nrows; ++jdx) { d_row2tensor[jdx + offset] = idx; d_row2row_within_tensor[jdx + offset] = jdx; } }); // create output data columns auto tensor_token_ids = cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT32}, nrows_tensor_token_ids * max_sequence_length, cudf::mask_state::UNALLOCATED, stream, mr); auto tensor_attention_mask = cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT32}, nrows_tensor_token_ids * max_sequence_length, cudf::mask_state::UNALLOCATED, stream, mr); auto tensor_metadata = cudf::make_numeric_column(cudf::data_type{cudf::type_id::UINT32}, nrows_tensor_token_ids * 3, cudf::mask_state::UNALLOCATED, stream, mr); // compute final-tensor, mask, and metadata constexpr int block_size = 256; cudf::detail::grid_1d const grid{ static_cast<cudf::size_type>(nrows_tensor_token_ids * max_sequence_length), block_size}; kernel_compute_tensor_metadata<<<grid.num_blocks, grid.num_threads_per_block, 0, stream.value()>>>( device_token_ids, device_offsets, d_row2tensor, d_row2row_within_tensor, max_sequence_length, nrows_tensor_token_ids, stride, do_truncate, tensor_token_ids->mutable_view().data<uint32_t>(), tensor_attention_mask->mutable_view().data<uint32_t>(), tensor_metadata->mutable_view().data<uint32_t>()); return tokenizer_result{nrows_tensor_token_ids, max_sequence_length, std::move(tensor_token_ids), std::move(tensor_attention_mask), std::move(tensor_metadata)}; } } // namespace detail tokenizer_result subword_tokenize(cudf::strings_column_view const& strings, hashed_vocabulary const& vocabulary_table, uint32_t max_sequence_length, uint32_t stride, bool do_lower_case, bool do_truncate, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); return detail::subword_tokenize(strings, vocabulary_table, max_sequence_length, stride, do_lower_case, do_truncate, cudf::get_default_stream(), mr); } } // namespace nvtext
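The transform_exclusive_scan above computes rows-per-string as 1 when truncating or when the tokens fit, and 1 + ceil((num_tokens - max_sequence_length) / stride) otherwise. A host-side sketch of that formula with a worked example:

// Host-side sketch of the rows-per-string formula used in the scan above:
#include <cassert>
#include <cstdint>

uint32_t rows_for_string(uint32_t num_tokens,
                         uint32_t max_sequence_length,
                         uint32_t stride,
                         bool do_truncate)
{
  if (do_truncate || num_tokens <= max_sequence_length) { return 1; }
  // one full row plus ceil((num_tokens - max_sequence_length) / stride) continuation rows
  return 1 + ((num_tokens - max_sequence_length + stride - 1) / stride);
}

int main()
{
  // 100 tokens with max_sequence_length=64, stride=48: 1 + ceil(36/48) = 2 rows;
  // the second row replicates 64 - 48 = 16 token-ids from the first.
  assert(rows_for_string(100, 64, 48, false) == 2);
  assert(rows_for_string(100, 64, 48, true) == 1);
  return 0;
}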
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/hash_utils.cuh
/* * Copyright (c) 2020-2021, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cstdint> namespace nvtext { namespace detail { // Used for hashing functions in this file constexpr uint64_t PRIME = 281474976710677; /** * @brief This does a multiply mod 48 without overflow for the sdbm hash "pop" method. * * This method computes the bottom 48 bits of the result of multiplying two numbers * respecting the restrictions specified by the parameters. * * It works by splitting `num` into 16 bit chunks and performing repeated multiplies. * The result of all of those multiplies are added together. * * @param num_48bit A multiplicand that is at most 48 bits. * @param num Any 64 bit number to multiply by num_48bit mod 2**48 * @return (num_48bit * num) mod 2**48 */ __device__ uint64_t mul_mod_48(uint64_t num_48bit, uint64_t num) { constexpr uint64_t mask = (1ULL << 48) - 1; constexpr uint8_t bit_chunk_size = 16; uint64_t result = 0; #pragma unroll for (uint8_t i = 0; i < sizeof(num) / 2; ++i) { auto const shift_amt = bit_chunk_size * i; auto const bottom_16 = static_cast<uint16_t>(num >> shift_amt); // update result result = result + ((num_48bit * bottom_16) << shift_amt); result &= mask; } return result; } /** * @brief Computes the sdbm hash for the sequence starting at sequence_start up to length sequences. * * A start value for the sdbm hash can optionally be given. This is useful when checking if elements * starting with "##" exist in the table since we can pass in the hash of "##" as the start value. * * @param sequence_start Code points to hash * @param length Number of code points to hash * @param start_value Initializes the hash computation. * @return The sdbm hash of all elements in range `[sequence_start, sequence_start + length)` */ __device__ uint64_t sdbm_hash(uint32_t const* sequence_start, uint32_t length, uint64_t start_value = 0) { // This expression computes h_{i} = (65599*h{i-1} + new_val) mod 2^48 and was obtained from here: // http://www.cse.yorku.ca/~oz/hash.html constexpr uint64_t mask = (1ULL << 48) - 1; uint64_t hash_value = start_value; for (int i = 0; i < length; ++i) { hash_value = ((hash_value << 6) + (hash_value << 16) - hash_value) & mask; hash_value = (hash_value + (sequence_start[i] & mask)) & mask; } return hash_value; } /** * @brief Removes the last value added to the hash. * * If we have `current_hash = sdbm_hash("dog")` then, `prev_sdbm_hash(current_hash, cp(g))` * returns the `sdbm_hash("do")` where it is assumed cp returns the unicode code point for a * given letter. * * @param current_hash The current value used to compute the previous sdbm. * @param last_val Last value used in the hash sequence. * @return The hash value before that new value was added. 
*/ __device__ uint64_t prev_sdbm_hash(uint64_t current_hash, uint32_t last_val) { constexpr uint64_t mask = (1ULL << 48) - 1; // Multiplicative inverse of 65599 under mod 2**48 constexpr uint64_t mod_inverse = 24320495251391; uint64_t const prev_hash = mul_mod_48(mod_inverse, current_hash) - mul_mod_48(mod_inverse, last_val); return prev_hash & mask; } /** * @brief The hash function used for accesses to the table. * * This is a universal hash function with parameters chosen to achieve perfect hashing. * * Algorithm is `((a*k + b) % PRIME) % table_size` where @ref PRIME is globally defined * as 281474976710677 * * @param key Value to hash * @param a Outer table first constant * @param b Outer table second constant * @param table_size Number of bins in the hash table. * @return The computed hash value. */ __device__ uint32_t hash(uint64_t key, uint64_t a, uint64_t b, uint32_t table_size) { return ((a * key + b) % PRIME) % table_size; } /** * @brief Retrieves the value associated with key in the hash table. * * If there is no value in the table with the input key, -1 is returned. * * This method will ALWAYS return the correct value if a key is in the table. However, some * code point sequences may hash to the same key in which case an incorrect value is returned. * This collision is rare and will not likely affect the model's performance. * * @param key The key to search for in the hash table * @param hash_table A pointer to the flattened hash table * @param bin_coefficients A pointer to the hashing parameters for each bin in the hash table. * @param bin_offsets A pointer to the start of each bin in the hash table. * @return -1 if key is not in the hash table. If the key is in the table returns an index in * [0, vocab_size) indicating the index for the token in the bert model. */ __device__ int retrieve(uint64_t const key, uint32_t const outer_table_a, uint32_t const outer_table_b, uint16_t const num_bins, uint64_t const* hash_table, uint64_t const* bin_coefficients, uint16_t const* bin_offsets) { auto const hash_bin = hash(key, outer_table_a, outer_table_b, num_bins); auto const bin_params = bin_coefficients[hash_bin]; auto const start_ht_offset = bin_offsets[hash_bin]; // The shift constants are due to how the hash coefficients are packed and are // obtained from the python script perfect_hash.py which generates the expected tables. auto const inner_bin_a = bin_params >> 16; auto const inner_bin_b = (bin_params >> 9) & ((1 << 7) - 1); auto const bin_size = static_cast<uint8_t>(bin_params); if (bin_size == 0) { return -1; } // key hash has no bin parameters auto const inner_offset = hash(key, inner_bin_a, inner_bin_b, bin_size); auto const kv_pair = hash_table[start_ht_offset + inner_offset]; auto const expected_key = kv_pair >> 16; // extract value from encoded key-value int value = kv_pair & ((1 << 16) - 1); return key == expected_key ? value : -1; } } // namespace detail } // namespace nvtext
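The pop trick in prev_sdbm_hash relies on 24320495251391 being the multiplicative inverse of 65599 modulo 2^48; since 2^48 divides 2^64, plain 64-bit arithmetic masked to 48 bits is enough to verify the round trip on the host. A sketch (host re-implementation for testing only; the functions above are device-only):

// Host-only round-trip check for the sdbm push/pop pair (sketch):
#include <cassert>
#include <cstdint>

constexpr uint64_t MASK48 = (1ULL << 48) - 1;

uint64_t host_sdbm_push(uint64_t h, uint32_t cp)  // h*65599 + cp (mod 2^48)
{
  return ((h << 6) + (h << 16) - h + cp) & MASK48;
}

uint64_t host_sdbm_pop(uint64_t h, uint32_t cp)  // inverse of host_sdbm_push
{
  constexpr uint64_t inverse = 24320495251391;  // 65599^-1 mod 2^48 (from above)
  return ((h - cp) * inverse) & MASK48;  // 64-bit wraparound is harmless mod 2^48
}

int main()
{
  uint64_t h = 0;
  for (uint32_t cp : {'d', 'o', 'g'}) { h = host_sdbm_push(h, cp); }
  // popping 'g' from hash("dog") must give hash("do")
  assert(host_sdbm_pop(h, 'g') == host_sdbm_push(host_sdbm_push(0, 'd'), 'o'));
  return 0;
}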
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/tokenizer_utils.cuh
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <text/subword/detail/cp_data.h> #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <cstdint> namespace nvtext { namespace detail { constexpr int THREADS_PER_BLOCK = 64; /** * @brief In-place update of offsets values. * * In the `d_chars_up_to_idx`, the last character of each string is basically * the offset (i.e. the number of characters) in that string. * * Example * @code{.pseudo} * // 3 strings with sizes 5,4,2 * d_offsets = [0,5,9,11] * // code points generated per character (as offsets) * // 2nd string has an extra code point at its first char * d_chars_up_to_idx = [1,2,3,4,5,6,8,9,10,11,12] * d_chars_up_to_idx[d_offsets[1-3]] is [5,10,12] * => d_offsets becomes [0,5,10,12] * @endcode */ struct update_strings_lengths_fn { uint32_t const* d_chars_up_to_idx; cudf::size_type* d_offsets; __device__ void operator()(cudf::size_type idx) { auto const offset = d_offsets[idx]; d_offsets[idx] = offset > 0 ? d_chars_up_to_idx[offset - 1] : 0; } }; /** * @brief Retrieve the code point metadata table. * * @param stream CUDA stream used for device memory operations and kernel launches. */ rmm::device_uvector<codepoint_metadata_type> get_codepoint_metadata(rmm::cuda_stream_view stream); /** * @brief Retrieve the auxiliary code point metadata table. * * @param stream CUDA stream used for device memory operations and kernel launches. */ rmm::device_uvector<aux_codepoint_data_type> get_aux_codepoint_data(rmm::cuda_stream_view stream); } // namespace detail } // namespace nvtext
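The @code example in update_strings_lengths_fn above can be replayed on the host almost verbatim. A small sketch that mirrors the functor's logic for the documented inputs:

// Host-side replay of the update_strings_lengths_fn example (sketch):
#include <cassert>
#include <cstdint>
#include <vector>

int main()
{
  std::vector<uint32_t> chars_up_to_idx = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12};
  std::vector<int32_t> offsets          = {0, 5, 9, 11};  // 3 strings of sizes 5,4,2
  // the functor is applied for idx in [1, num_strings]
  for (std::size_t idx = 1; idx < offsets.size(); ++idx) {
    auto const offset = offsets[idx];
    offsets[idx]      = offset > 0 ? chars_up_to_idx[offset - 1] : 0;
  }
  assert((offsets == std::vector<int32_t>{0, 5, 10, 12}));
  return 0;
}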
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/cp_data.h
/* * Copyright (c) 2020-2022, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cstdint> constexpr uint32_t NEW_CP_MASK = 0x1f'ffffu; constexpr uint32_t MULTICHAR_SHIFT = 23; constexpr uint32_t MULTICHAR_MASK = 1; constexpr uint32_t TOKEN_CAT_SHIFT = 24; constexpr uint32_t TOKEN_CAT_MASK = 7; constexpr uint32_t TOKEN_CAT_ADD_SPACE = 0; constexpr uint32_t TOKEN_CAT_ADD_SPACE_IF_LOWER = 1; constexpr uint32_t TOKEN_CAT_REMOVE_CHAR = 2; constexpr uint32_t TOKEN_CAT_REMOVE_CHAR_IF_LOWER = 3; constexpr uint32_t TOKEN_CAT_ALWAYS_REPLACE = 4; constexpr uint32_t SPACE_CODE_POINT = 32; constexpr uint32_t MAX_NEW_CHARS = 3; using codepoint_metadata_type = uint32_t; using aux_codepoint_data_type = uint64_t;
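The masks and shifts above imply a packed layout for each metadata word: replacement code point in the low 21 bits, the multi-character flag at bit 23, and the token category in bits 24-26. A decoding sketch, assuming this header is included (the struct and function names here are illustrative):

// Sketch: unpacking one codepoint_metadata_type value (assumes this header is included):
#include <cstdint>

struct decoded_metadata {
  uint32_t new_cp;     // replacement code point (bits 0-20, NEW_CP_MASK)
  bool multi_char;     // multi-character transform flag (bit 23)
  uint32_t token_cat;  // token category (bits 24-26)
};

inline decoded_metadata decode_metadata(codepoint_metadata_type metadata)
{
  return {metadata & NEW_CP_MASK,
          ((metadata >> MULTICHAR_SHIFT) & MULTICHAR_MASK) != 0,
          (metadata >> TOKEN_CAT_SHIFT) & TOKEN_CAT_MASK};
}
// Example: the default value 0x0500'0000 decodes to new_cp=0, multi_char=false,
// token_cat=5 (uncategorized).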
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/wordpiece_tokenizer.hpp
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <text/subword/detail/data_normalizer.hpp> #include <rmm/cuda_stream_view.hpp> namespace nvtext { struct hashed_vocabulary; namespace detail { /** * @brief This splits words into tokens contained in the model vocabulary file. * * The tokenizer first normalizes the character bytes, identifies the words in * each string, and then converts each word into an integer token-id per the * provided vocabulary hash table. * * The `tokenize()` function produces a pair of device vectors (a `uvector_pair`). * The first is the token-ids for each word identified in the input strings. * The second is the offsets to identify which ids go with each string. * * Temporary buffers are created equal to 3 uint32 values plus 1 byte per input byte. * Also the normalize step allocates an additional 16x bytes per input byte but 8x * of this memory is reused by the `tokenize()` function. * This means 13x + 8x = 21x the number of bytes of the input strings buffer must be * available to call the `tokenize()` function in this class. */ class wordpiece_tokenizer { public: /** * @brief Creates a full tokenizer that cleans the text and splits it into tokens. * * @param vocab_table The preprocessed hashed vocabulary data. * @param max_sequence_length Limit the number of token-ids per row in the output * @param stride Each row in tensor-token-ids will replicate `max_sequence_length - stride` * token-ids from the previous row, unless it is the first string. * @param do_truncate If true, the tokenizer will discard all the token-ids after * `max_sequence_length` for each input string. If false, it will use a * new row in the tensor-token-ids to continue generating the output. * @param do_lower_case If true, the tokenizer will convert uppercase characters in the * input stream to lowercase and strip accents from those characters. * If false, accented and uppercase characters are not transformed. * @param max_word_length The length of the longest word that will be tokenized. Words * longer than this will simply be replaced by the unknown token * specified in the `vocab_table`. */ wordpiece_tokenizer(hashed_vocabulary const& vocab_table, uint32_t max_sequence_length, uint32_t stride, bool do_truncate, bool do_lower_case, uint32_t max_word_length = 200); /** * @brief Splits the input text into token ids. * * This class is simply a wrapper around the basic and word-piece tokenizers. * * @param d_strings A vector of strings which MUST be encoded in the utf8 format. * @param d_offsets A vector of byte offsets to the beginning of individual strings in * the `d_strings` parameter. * @param num_strings The number of strings in `d_strings`. * @param stream CUDA stream used for device memory operations and kernel launches.
* @return Pointer to token-ids and token-id offsets */ uvector_pair tokenize(char const* d_strings, cudf::size_type const* d_offsets, cudf::size_type num_strings, rmm::cuda_stream_view stream); private: /** * @brief Splits the code points from the normalizer into tokens. * * @param[in,out] cps_and_offsets The output code points and offsets * from the normalizer. * The data is modified to contain the token ids and token counts * per string. * @param stream CUDA stream used for device memory operations and kernel launches. */ void tokenize(uvector_pair& cps_and_offsets, rmm::cuda_stream_view stream); hashed_vocabulary const& vocab_table; data_normalizer normalizer; // removes punctuation, accents, etc uint32_t const max_sequence_length; uint32_t const stride; bool const do_truncate; uint32_t const max_word_length; }; } // namespace detail } // namespace nvtext
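// ---------------------------------------------------------------------------
// Editorial sketch (not part of the original header): a minimal example of
// the intended call sequence, assuming a hashed_vocabulary has already been
// built (for example via nvtext::load_vocabulary_file()). The function name
// and device-buffer parameters below are hypothetical, for illustration only.

namespace {

auto tokenize_example(nvtext::hashed_vocabulary const& vocab,
                      char const* d_chars,               // utf8 bytes (device memory)
                      cudf::size_type const* d_offsets,  // byte offset of each string (device memory)
                      cudf::size_type num_strings,
                      rmm::cuda_stream_view stream)
{
  // 64 token-ids per output row; each new row repeats 64 - 48 = 16 ids from
  // the previous row; do_truncate=false continues onto new rows instead of
  // discarding ids; do_lower_case=true lower-cases and strips accents.
  nvtext::detail::wordpiece_tokenizer tokenizer(vocab, 64, 48, false, true);

  // First vector: token-ids; second vector: offsets mapping ids to strings.
  return tokenizer.tokenize(d_chars, d_offsets, num_strings, stream);
}

}  // unnamed namespace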
0
rapidsai_public_repos/cudf/cpp/src/text/subword
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/codepoint_metadata.ah
/*
 * Copyright (c) 2020, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cstdint>

// this file is included only by data_normalizer.cu

// this is the metadata for every unicode code point value
// it is broken into pieces since only 10% of the values are unique

// some magic numbers
constexpr uint32_t codepoint_metadata_size          = 1114112;   // 0x11'0000
constexpr uint32_t aux_codepoint_data_size          = 119233;
constexpr uint32_t codepoint_metadata_default_value = 83886080;  // 0x0500'0000
constexpr uint32_t aux_codepoint_default_value      = 0;
constexpr uint32_t cp_section1_end    = 195104;
constexpr uint32_t cp_section2_begin  = 917505;
constexpr uint32_t cp_section2_end    = 917999;
constexpr uint32_t aux_section1_end   = 6978;
constexpr uint32_t aux_section2_begin = 44032;
constexpr uint32_t aux_section2_end   = 55203;
constexpr uint32_t aux_section3_begin = 70475;
constexpr uint32_t aux_section3_end   = 71099;
constexpr uint32_t aux_section4_begin = 119134;
constexpr uint32_t aux_section4_end   = 119232;

using codepoint_metadata_type = uint32_t;
using aux_codepoint_data_type = uint64_t;

codepoint_metadata_type const codepoint_metadata[] = {
33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,67108896,67108896,33554464,33554464,67108896,33554432,33554432,
33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554464,33554464,33554464,33554464,
67108896,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,0,0,
0,83886177,83886178,83886179,83886180,83886181,83886182,83886183,83886184,83886185,83886186,83886187,83886188,83886189,83886190,83886191,
83886192,83886193,83886194,83886195,83886196,83886197,83886198,83886199,83886200,83886201,83886202,0,0,0,0,0,
0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,33554432,
33554432,33554432,33554432,33554432,33554432,33554464,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,
33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,
67108896,0,83886080,83886080,83886080,83886080,83886080,0,83886080,83886080,83886080,0,83886080,33554432,83886080,83886080,
83886080,83886080,83886080,83886080,83886080,83886080,0,0,83886080,83886080,83886080,0,83886080,83886080,83886080,0,
83886177,83886177,83886177,83886177,83886177,83886177,85983462,83886179,83886181,83886181,83886181,83886181,83886185,83886185,83886185,83886185,
85983472,83886190,83886191,83886191,83886191,83886191,83886191,83886080,85983480,83886197,83886197,83886197,83886197,83886201,85983486,83886080,
83886177,83886177,83886177,83886177,83886177,83886177,83886080,83886179,83886181,83886181,83886181,83886181,83886185,83886185,83886185,83886185, 83886080,83886190,83886191,83886191,83886191,83886191,83886191,83886080,83886080,83886197,83886197,83886197,83886197,83886201,83886080,83886201, 83886177,83886177,83886177,83886177,83886177,83886177,83886179,83886179,83886179,83886179,83886179,83886179,83886179,83886179,83886180,83886180, 85983505,83886080,83886181,83886181,83886181,83886181,83886181,83886181,83886181,83886181,83886181,83886181,83886183,83886183,83886183,83886183, 83886183,83886183,83886183,83886183,83886184,83886184,85983527,83886080,83886185,83886185,83886185,83886185,83886185,83886185,83886185,83886185, 83886185,83886080,85983539,83886080,83886186,83886186,83886187,83886187,83886080,83886188,83886188,83886188,83886188,83886188,83886188,85983552, 83886080,85983554,83886080,83886190,83886190,83886190,83886190,83886190,83886190,83886080,85983563,83886080,83886191,83886191,83886191,83886191, 83886191,83886191,85983571,83886080,83886194,83886194,83886194,83886194,83886194,83886194,83886195,83886195,83886195,83886195,83886195,83886195, 83886195,83886195,83886196,83886196,83886196,83886196,85983591,83886080,83886197,83886197,83886197,83886197,83886197,83886197,83886197,83886197, 83886197,83886197,83886197,83886197,83886199,83886199,83886201,83886201,83886201,83886202,83886202,83886202,83886202,83886202,83886202,83886080, 83886080,85983827,85983619,83886080,85983621,83886080,85983828,85983624,83886080,85983830,85983831,85983628,83886080,83886080,85983709,85983833, 85983835,85983634,83886080,85983840,85983843,83886080,85983849,85983848,85983641,83886080,83886080,83886080,85983855,85983858,83886080,85983861, 83886191,83886191,85983651,83886080,85983653,83886080,85983872,85983656,83886080,85983875,83886080,83886080,85983661,83886080,85983880,83886197, 83886197,85983882,85983883,85983668,83886080,85983670,83886080,85983890,85983673,83886080,83886080,83886080,85983677,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,85983686,85983686,83886080,85983689,85983689,83886080,85983692,85983692,83886080,83886177,83886177,83886185, 83886185,83886191,83886191,83886197,83886197,83886197,83886197,83886197,83886197,83886197,83886197,83886197,83886197,83886080,83886177,83886177, 83886177,83886177,85983462,85983462,85983717,83886080,83886183,83886183,83886187,83886187,83886191,83886191,83886191,83886191,85983890,85983890, 83886186,85983731,85983731,83886080,83886183,83886183,85983637,85983679,83886190,83886190,83886177,83886177,85983462,85983462,85983480,85983480, 83886177,83886177,83886177,83886177,83886181,83886181,83886181,83886181,83886185,83886185,83886185,83886185,83886191,83886191,83886191,83886191, 83886194,83886194,83886194,83886194,83886197,83886197,83886197,83886197,83886195,83886195,83886196,83886196,85983773,83886080,83886184,83886184, 85983646,83886080,85983779,83886080,85983781,83886080,83886177,83886177,83886181,83886181,83886191,83886191,83886191,83886191,83886191,83886191, 83886191,83886191,83886201,83886201,83886080,83886080,83886080,83886080,83886080,83886080,88091749,85983804,83886080,85983642,88091750,83886080, 83886080,85983810,83886080,85983616,85983881,85983884,85983815,83886080,85983817,83886080,85983819,83886080,85983821,83886080,85983823,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 85984113,83886080,85984115,83886080,85983929,83886080,85984119,83886080,83886080,83886080,83886080,83886080,83886080,83886080,59,85984243, 83886080,83886080,83886080,83886080,83886080,85983400,85984177,2097335,85984181,85984183,85984185,83886080,85984191,83886080,85984197,85984201, 85984185,85984177,85984178,85984179,85984180,85984181,85984182,85984183,85984184,85984185,85984186,85984187,85984188,85984189,85984190,85984191, 85984192,85984193,83886080,85984195,85984196,85984197,85984198,85984199,85984200,85984201,85984185,85984197,85984177,85984181,85984183,85984185, 85984197,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,85984185,85984197,85984191,85984197,85984201,85984215, 83886080,83886080,83886080,85984210,85984210,83886080,83886080,83886080,85984217,83886080,85984219,83886080,85984221,83886080,85984223,83886080, 
85984225,83886080,85984227,83886080,85984229,83886080,85984231,83886080,85984233,83886080,85984235,83886080,85984237,83886080,85984239,83886080, 83886080,83886080,83886080,83886080,85984184,83886080,83886080,85984248,83886080,85984242,85984251,83886080,83886080,85984123,85984124,85984125, 85984309,85984309,85984338,85984307,85984340,85984341,85984342,85984342,85984344,85984345,85984346,85984347,85984314,85984312,85984323,85984351, 85984304,85984305,85984306,85984307,85984308,85984309,85984310,85984311,85984312,85984312,85984314,85984315,85984316,85984317,85984318,85984319, 85984320,85984321,85984322,85984323,85984324,85984325,85984326,85984327,85984328,85984329,85984330,85984331,85984332,85984333,85984334,85984335, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,85984312,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 85984309,85984309,83886080,85984307,83886080,83886080,83886080,85984342,83886080,83886080,83886080,83886080,85984314,85984312,85984323,83886080, 85984353,83886080,85984355,83886080,85984357,83886080,85984359,83886080,85984361,83886080,85984363,83886080,85984365,83886080,85984367,83886080, 85984369,83886080,85984371,83886080,85984373,83886080,85984373,85984373,85984377,83886080,85984379,83886080,85984381,83886080,85984383,83886080, 85984385,83886080,83886080,50331680,50331680,50331680,50331680,50331680,83886080,83886080,85984395,83886080,85984397,83886080,85984399,83886080, 85984401,83886080,85984403,83886080,85984405,83886080,85984407,83886080,85984409,83886080,85984411,83886080,85984413,83886080,85984415,83886080, 85984417,83886080,85984419,83886080,85984421,83886080,85984423,83886080,85984425,83886080,85984427,83886080,85984429,83886080,85984431,83886080, 85984433,83886080,85984435,83886080,85984437,83886080,85984439,83886080,85984441,83886080,85984443,83886080,85984445,83886080,85984447,83886080, 85984463,85984310,85984310,85984452,83886080,85984454,83886080,85984456,83886080,85984458,83886080,85984460,83886080,85984462,83886080,83886080, 85984304,85984304,85984304,85984304,85984469,83886080,85984309,85984309,85984473,83886080,85984473,85984473,85984310,85984310,85984311,85984311, 85984481,83886080,85984312,85984312,85984312,85984312,85984318,85984318,85984489,83886080,85984489,85984489,85984333,85984333,85984323,85984323, 85984323,85984323,85984323,85984323,85984327,85984327,85984503,83886080,85984331,85984331,85984507,83886080,85984509,83886080,85984511,83886080, 85984513,83886080,85984515,83886080,85984517,83886080,85984519,83886080,85984521,83886080,85984523,83886080,85984525,83886080,85984527,83886080, 85984529,83886080,85984531,83886080,85984533,83886080,85984535,83886080,85984537,83886080,85984539,83886080,85984541,83886080,85984543,83886080, 85984545,83886080,85984547,83886080,85984549,83886080,85984551,83886080,85984553,83886080,85984555,83886080,85984557,83886080,85984559,83886080, 83886080,85984609,85984610,85984611,85984612,85984613,85984614,85984615,85984616,85984617,85984618,85984619,85984620,85984621,85984622,85984623, 85984624,85984625,85984626,85984627,85984628,85984629,85984630,85984631,85984632,85984633,85984634,85984635,85984636,85984637,85984638,85984639, 85984640,85984641,85984642,85984643,85984644,85984645,85984646,83886080,83886080,83886080,0,0,0,0,0,0, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,0,50331680, 0,50331680,50331680,0,50331680,50331680,0,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 33554432,33554432,33554432,33554432,33554432,33554432,83886080,83886080,83886080,0,0,83886080,0,0,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,0,33554432,83886080,0,0, 83886080,83886080,85984807,85984807,85984840,85984807,85984842,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,83886080,83886080, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 85984981,83886080,85984961,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,85984978,0,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,33554432,83886080,50331680, 50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,83886080,50331680,50331680,50331680,50331680,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,83886080,33554432, 83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,83886080,83886080,83886080,0,0,0,83886080,83886080,83886080,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,83886080,83886080, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,83886080,83886080,0,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,33554432,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,88082728,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,88082736,83886080,83886080,88082739,83886080,83886080,83886080,83886080,83886080,50331680,83886080,50331680,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,50331680,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,88082709,88082710,88082711,88082716,88082721,88082722,88082731,88082735, 83886080,83886080,50331680,50331680,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,96471495,96471495,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,88082849,88082850,83886080,88082863, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,50331680,83886080, 83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,88082994,83886080,83886080,88083000,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080, 83886080,50331680,50331680,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,50331680,50331680,50331680,83886080,83886080, 83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,88082966,88082967,88082972,83886080,83886080,88082987,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,83886080,83886080,83886080,50331680,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,83886080,83886080,83886080,83886080,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680, 83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,50331680, 83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,88083271,83886080,83886080,96471879,96471879,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080,88083233,88083234,83886080,83886080, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,96471954,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,96472006,96472007,96472006,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,83886080,83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680, 50331680,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,83886080,83886080,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,50331680, 88083669,83886080,83886080,83886080,83886080,83886080,50331680,88083669,88083670,83886080,88083650,96472258,50331680,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,96472390,96472391,96472390,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,50331680,83886080,50331680,83886080,83886080,83886080,88083929,83886080,96472537,96472537,96472537,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,83886080,0,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,50331680,83886080,50331680,83886080,50331680,0,0,0,0,83886080,83886080, 83886080,83886080,83886080,88084290,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,88084300,83886080,83886080, 83886080,83886080,88084305,83886080,83886080,83886080,83886080,88084310,83886080,83886080,83886080,83886080,88084315,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,88084288,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080, 50331680,50331680,50331680,50331680,50331680,0,50331680,50331680,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,83886080,83886080,83886080,83886080,0,0,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,88084517,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680, 50331680,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,83886080,83886080,50331680,50331680,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,50331680,50331680, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080, 88091904,88091905,88091906,88091907,88091908,88091909,88091910,88091911,88091912,88091913,88091914,88091915,88091916,88091917,88091918,88091919, 88091920,88091921,88091922,88091923,88091924,88091925,88091926,88091927,88091928,88091929,88091930,88091931,88091932,88091933,88091934,88091935, 88091936,88091937,88091938,88091939,88091940,88091941,83886080,88091943,83886080,83886080,83886080,83886080,83886080,88091949,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680, 0,0,0,0,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 88124272,88124273,88124274,88124275,88124276,88124277,88124278,88124279,88124280,88124281,88124282,88124283,88124284,88124285,88124286,88124287, 88124288,88124289,88124290,88124291,88124292,88124293,88124294,88124295,88124296,88124297,88124298,88124299,88124300,88124301,88124302,88124303, 88124304,88124305,88124306,88124307,88124308,88124309,88124310,88124311,88124312,88124313,88124314,88124315,88124316,88124317,88124318,88124319, 88124320,88124321,88124322,88124323,88124324,88124325,88124326,88124327,88124328,88124329,88124330,88124331,88124332,88124333,88124334,88124335, 88124336,88124337,88124338,88124339,88124340,88124341,88124342,88124343,88124344,88124345,88124346,88124347,88124348,88124349,88124350,88124351, 88085496,88085497,88085498,88085499,88085500,88085501,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
// NOTE: A machine-generated lookup table of 32-bit packed code-point
// metadata values occupied this span; it is not hand-edited code. Only its
// shape is recoverable: the table is dominated by the sentinel value
// 83886080 (0x05000000), with long runs of 50331680 (0x03000020),
// 67108896 (0x04000020), 33554432 (0x02000000), and zeros, plus scattered
// per-code-point entries (for example the consecutive run
// 88084688..88084735, apparently case mappings). In the source the values
// were laid out 16 per line.
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
// [Elided: a large block of machine-generated lookup-table data -- long runs
//  of packed 32-bit values (0, 83886080, 50331680, 88122945, 96473344, ...)
//  with no surrounding declaration or type. Only the raw numbers were
//  recoverable; they appear to form a codepoint-to-metadata table.]
96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351,96473351, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 
96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 
96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352, 96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473352,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 
96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353, 96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473353,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 
96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 
96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354,96473354, 96473354,96473354,96473354,96473354,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 
96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355,96473355, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 
96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 
96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356, 96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473356,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 
96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357, 96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473357,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 
96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 
96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358,96473358, 96473358,96473358,96473358,96473358,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 
96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359,96473359, 96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360, 96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360,96473360, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 4230216,4220660,4230858,4230344,4222673,4214322,4215781,4235164,4235164,4217169,4231633,4216199,4217160,4219382,4224617,4226949, 4228671,4229050,4229368,4231311,4221442,4222235,4223193,4223966,4228157,4231530,4233713,4214402,4215669,4221700,4223515,4228653, 4234782,4218192,4222955,4228557,4229476,4219593,4227544,4229151,4218570,4220695,4222314,4223740,4231374,4214662,4215223,4215518, 4220100,4221651,4223504,4224743,4227073,4228614,4228700,4230639,4233010,4234095,4234746,4225164,4225407,4226464,4228041,4231940, 4234879,4229846,4217055,4218628,4226144,4227198,4223586,4225226,4230338,4232951,4217048,4217954,4221459,4222426,4222735,4226351, 4226615,4232779,4215506,4227211,4215260,4215244,4225564,4226494,4228081,4232821,4230016,4219599,4221442,4229886,4214329,4217831, 4218898,4223879,4224368,4215575,4225275,4214719,4218793,4214285,4222156,4220280,4226338,4215747,4216926,4224769,4228169,4229802, 
4221882,4231088,4222088,4219646,4227813,4219808,4224357,4214446,4215145,4215241,4221057,4226279,4227695,4229842,4231631,4215541, 4215874,4217203,4218604,4220357,4222974,4225322,4232621,4233834,4234903,4234958,4215451,4220614,4221815,4231010,4218484,4219280, 4219392,4220058,4222755,4223305,4224137,4225482,4226548,4227183,4230950,4228334,4231203,4232010,4215319,4215459,4215997,4223176, 4229314,4229802,4218569,4218869,4219771,4221870,4226110,4223861,4214500,4216569,4217831,4218298,4218908,4223922,4224105,4226970, 4227142,4231732,4232950,4233032,4233240,4214667,4225454,4231604,4232888,4219105,4214406,4215002,4217838,4217919,4220313,4221442, 4223438,4224578,4228348,4231292,4235149,4220552,4232750,4215433,4220795,4220915,4222273,4222620,4224009,4224345,4225131,4226320, 4233310,4215149,4219438,4232824,4214827,4218137,4222442,4230954,4218763,4219204,4220951,4223879,4232838,4215337,4215823,4217957, 4220435,4220750,4221096,4222181,4224006,4224482,4226937,4229327,4229345,4231628,4232930,4215615,4222650,4215837,4223440,4224152, 4228602,4232867,4234327,4234911,4220823,4222411,4227560,4225739,4225824,4226194,4223680,4223129,4229976,4214464,4227894,4215354, 4215303,4218534,4219603,4226262,4217733,4222238,4220596,4230971,4229196,4232781,4229515,4218579,4215104,4216256,0,0, 4216922,0,4220532,0,0,4215262,4223786,4224714,4225340,4225374,4225381,4225423,4233046,4226238,4227005,0, 4228626,0,4229880,0,0,4231224,4231421,0,0,0,4233455,4233468,4233512,4234676,4231390,4232887, 4214702,4215015,4215117,4215497,4215524,4215633,4216221,4216326,4216424,4216896,4217000,4217956,4217966,4219028,4219240,4219278, 4219378,4220239,4220386,4220561,4221061,4222327,4222490,4222754,4223342,4223531,4224034,4225169,4225342,4225353,4225352,4225360, 4225366,4225373,4225421,4225422,4225600,4225665,4225984,4226548,4226569,4226625,4226930,4227077,4227565,4227705,4227705,4228183, 4229392,4229526,4229889,4229945,4230355,4230408,4231094,4231224,4232931,4233215,4233275,4218997,6439662,4227608,0,0, 4214310,4215221,4215144,4214656,4215109,4215168,4215495,4215546,4216221,4216149,4216217,4216290,4216922,4217011,4217156,4217172, 4217442,4217640,4218578,4218585,4218729,4218797,4219096,4219214,4219144,4219278,4219232,4219378,4219444,4219844,4219932,4219986, 4220246,4220532,4220695,4220699,4220758,4221817,4221882,4222273,4222683,4222667,4222754,4223006,4223342,4224935,4223541,4223663, 4223786,4224113,4224262,4224315,4224541,4224543,4224714,4224731,4224756,4224842,4224832,4225228,4225713,4225984,4226171,4226395, 4226548,4226878,4227077,4227922,4228079,4228985,4229441,4229510,4229526,4229823,4229880,4229835,4229889,4229886,4229869,4229945, 4230026,4230408,4230968,4231282,4231577,4231798,4232828,4232931,4233046,4233179,4233215,4233227,4233275,4234002,4235164,6432842, 6432836,6435797,4209565,4210712,4210745,6443593,6446288,6454995,4235075,4235150,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,85984729,50331680,85984754, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,85984745,85984745,85984745,85984745,85984720,85984720, 85984720,85984721,85984722,85984723,85984724,85984725,85984726,83886080,85984728,85984729,85984730,85984731,85984732,83886080,85984734,83886080, 
85984736,85984737,83886080,85984739,85984740,83886080,85984742,85984743,85984744,85984745,85984746,85984725,85984721,85984731,85984740,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 0,0,0,0,0,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,83886080,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,83886080,0,83886080,83886080,83886080,83886080,0,83886080,0,0,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,33554432, 83886080,0,0,0,83886080,0,0,0,0,0,0,83886080,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,83886080,83886080,83886080,0, 0,88145729,88145730,88145731,88145732,88145733,88145734,88145735,88145736,88145737,88145738,88145739,88145740,88145741,88145742,88145743, 88145744,88145745,88145746,88145747,88145748,88145749,88145750,88145751,88145752,88145753,88145754,0,0,0,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,0,83886080,0, 0,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,33554432,33554432,33554432,83886080,33554432,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 90244136,90244137,90244138,90244139,90244140,90244141,90244142,90244143,90244144,90244145,90244146,90244147,90244148,90244149,90244150,90244151, 90244152,90244153,90244154,90244155,90244156,90244157,90244158,90244159,90244160,90244161,90244162,90244163,90244164,90244165,90244166,90244167, 90244168,90244169,90244170,90244171,90244172,90244173,90244174,90244175,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 90244312,90244313,90244314,90244315,90244316,90244317,90244318,90244319,90244320,90244321,90244322,90244323,90244324,90244325,90244326,90244327, 90244328,90244329,90244330,90244331,90244332,90244333,90244334,90244335,90244336,90244337,90244338,90244339,90244340,90244341,90244342,90244343, 90244344,90244345,90244346,90244347,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,83886080,83886080,83886080,83886080,50331680, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,0,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 90246336,90246337,90246338,90246339,90246340,90246341,90246342,90246343,90246344,90246345,90246346,90246347,90246348,90246349,90246350,90246351, 90246352,90246353,90246354,90246355,90246356,90246357,90246358,90246359,90246360,90246361,90246362,90246363,90246364,90246365,90246366,90246367, 90246368,90246369,90246370,90246371,90246372,90246373,90246374,90246375,90246376,90246377,90246378,90246379,90246380,90246381,90246382,90246383, 90246384,90246385,90246386,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,83886080,83886080,83886080,83886080,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,0,0,0,0,0,0,0,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680, 50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,90247321,83886080,90247323,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,90247333,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,0,0,33554432,0,0, 0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,33554432,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,50331680,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080, 83886080,83886080,83886080,83886080,83886080,0,0,0,0,50331680,50331680,50331680,50331680,0,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680, 50331680,50331680,83886080,83886080,50331680,83886080,50331680,50331680,0,0,0,0,0,0,50331680,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680, 83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080,83886080, 50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,98636615,98636615,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 83886080,83886080,50331680,50331680,50331680,83886080,50331680,83886080,83886080,83886080,83886080,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,83886080,0,50331680,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,90248377,98636985,83886080,98636985,50331680, 50331680,83886080,50331680,50331680,83886080,83886080,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,98637240,98637241,50331680,50331680,83886080,50331680, 50331680,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,83886080,83886080,83886080,83886080,50331680,50331680,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,83886080,50331680, 50331680,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,0,0,0,0,0,0,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,83886080,50331680,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680, 83886080,83886080,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0,0,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,0,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 90249408,90249409,90249410,90249411,90249412,90249413,90249414,90249415,90249416,90249417,90249418,90249419,90249420,90249421,90249422,90249423, 90249424,90249425,90249426,90249427,90249428,90249429,90249430,90249431,90249432,90249433,90249434,90249435,90249436,90249437,90249438,90249439, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,50331680,50331680,0, 0,0,0,0,0,0,0,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,50331680,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,0,0,0,83886080,0,0, 0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680, 83886080,0,0,0,0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,83886080,50331680,50331680,83886080,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,50331680,83886080,50331680,50331680,83886080,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,83886080,83886080,83886080,50331680,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
// [large generated data table elided for readability]
// This span held a machine-generated table of 32-bit per-codepoint metadata
// words, one comma-separated value per codepoint. The entries were almost
// uniformly 83886080 (0x05000000), with occasional runs of 50331680
// (0x03000020) and 0. The exact values are produced by the table generator
// and carry no hand-written structure beyond the flag encoding it emits.
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
// (Generated codepoint-metadata lookup table: thousands of comma-separated
//  32-bit entries, one entry per Unicode codepoint. Nearly every entry is
//  83886080 (0x05000000); the exceptions are occasional 50331680 (0x03000020)
//  and 0 entries, plus one ascending run 90271328..90271359. The table body
//  is machine-generated data with no hand-written logic and is elided here.)
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
// (Auto-generated numeric lookup table elided: thousands of comma-separated
// 32-bit entries, almost all 83886080 (0x05000000), with sparse exceptions
// such as 0, 33554432 (0x02000000), 50331680 (0x03000020), and values in the
// 98685271-98685370 range.)
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,50331680,83886080,83886080,0,0,0,0,0,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680, 83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,83886080,50331680,50331680,83886080,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 90302754,90302755,90302756,90302757,90302758,90302759,90302760,90302761,90302762,90302763,90302764,90302765,90302766,90302767,90302768,90302769, 90302770,90302771,90302772,90302773,90302774,90302775,90302776,90302777,90302778,90302779,90302780,90302781,90302782,90302783,90302784,90302785, 90302786,90302787,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,50331680,50331680,50331680,50331680,50331680,50331680,50331680,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 
83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 4214333,4214328,4214337,6422818,4214624,4214702,4214715,4214786,4214906,4214937,4215015,4214991,4207774,6424122,4215117,4215124, 4215140,4215159,6423836,4207801,4215143,4215181,6423883,4215191,4215204,4214476,4215212,4215221,6459871,4215285,4215299,4207839, 4215355,4215366,4215410,4215415,4207893,4215495,4215497,4215524,4215546,4215557,4215558,4215575,4215625,4215633,4215642,4215667, 
4215677,4215679,4215679,4215679,6425132,4223088,4215754,4215775,6425443,4215787,4215793,4215814,4215966,4215864,4215880,4215912, 4215970,4216054,4216080,4216147,4216163,4216196,4216196,4216217,4216235,4216243,4216258,4216598,4216326,4216599,4216401,4216436, 4215303,4217070,4216782,4216820,4216845,4216715,4216882,4216881,4217004,6427876,4217074,4217079,4217094,4217114,4217122,4217186, 6428328,6428394,4217324,4217371,4217383,4217304,4217446,4208366,4208380,4217608,4217662,4217662,6429128,4217795,4217816,4217831, 4217843,6429464,4217855,4217862,4218707,4217890,4208513,4217952,4217966,4218048,4217997,6430180,4218179,6430182,4218222,4218219, 4218236,4218337,4218338,4208687,4218365,4218408,4218429,4218473,4208738,6431107,4208764,4218544,4218547,4218550,4218570,6464402, 4218622,6431537,6431537,4227585,4218658,4218658,4208839,6435512,6447578,4218722,4218731,4208867,4218778,4218829,4218839,4218873, 4219009,4208954,4208924,4219028,6432468,4219079,4219208,4219212,4219214,4219212,4219258,4219278,4219314,4219300,4219311,4219358, 4219378,4219382,4219408,4219419,4219485,4219569,4219604,4219728,6433548,4219709,4219644,4219752,4219779,4219876,6433777,4219938, 4219845,4219817,4209198,4220009,4220030,4220061,4220023,4209260,4220239,4220268,6434826,4220387,4220664,4220489,4209433,4220561, 4209416,4209380,4215186,4215189,4220672,4220572,4227245,4211673,4220695,4220699,4220705,4220766,4220755,6435779,4209481,4220922, 4220805,4221010,4221061,6435949,4221070,4220959,4221204,4209565,4221250,4221347,4221418,4221608,6436515,4221659,4209688,4221729, 6437031,4221780,4209742,4221810,4221855,4221882,4221883,6437517,6429963,6437626,4222030,6438076,4222143,4222157,4222055,4222230, 4222270,4222327,4222273,4222313,4222328,4222341,6438174,4222260,4222511,4222574,4209971,4222667,4222663,6438609,4222457,4222830, 6438750,6438798,4222918,4223033,4223006,4223003,4210070,4223050,4223101,4223095,4223149,6423845,4223301,6439523,4223388,6439851, 4223528,4223541,4223568,6440456,4223616,4223637,6440757,6440980,4223866,4223883,4210348,4223909,4210360,4210360,4224071,4224092, 4224113,4224133,4224202,4210459,4224292,6442038,4224318,6442130,4224368,6431135,4224528,6442913,6442936,6443076,4210684,4210696, 4224756,6443251,6443250,6443289,6443315,4224798,4224799,4224799,4224842,4210745,4224907,4210758,4210838,6444061,4225102,4225164, 4225228,4210915,6444582,4225366,6444698,6444741,4225423,4225515,4210991,4225600,4225610,4225615,6445436,6445735,6445735,4225774, 4211202,6445995,4225990,4225993,4211239,6446208,4226258,4211360,4226280,4226275,4226304,6446982,4226403,4211457,4226503,4226562, 4226629,4211508,6447656,6447687,4211545,6447833,4226938,6447934,4226965,4227066,4227077,6448346,6448419,4227168,6448552,4227184, 6435679,4211669,4227250,4227331,4211723,4227390,4217525,6449063,6449077,6435731,6435740,4227585,4227588,4231070,4211819,4227729, 4227723,4227741,4215475,4227761,4227763,4227773,4227814,6449980,4227813,4227869,4227939,4228013,4227875,4228029,4228071,4228183, 4227923,4228042,4228044,4228060,6450230,6450539,6450389,4212011,4228337,4228339,4228374,6452170,4228452,6450988,4212061,4212065, 6451121,6451410,4212075,4228688,4228700,4228711,4228713,4228777,4228744,4228878,4228834,4228985,4228904,4228971,4228998,4212183, 4229089,4229121,4212217,4229216,4229219,6452839,4229335,4229342,4212277,4229370,4207803,6453422,6453606,4212414,4212423,4229792, 4229869,4230026,4230229,6454440,4230315,4230337,4230427,4230519,6455087,6424580,4230603,4230588,4230640,6424798,4230868,4230968, 
6456786,6456813,4231316,4231409,4231441,6457134,4231451,4231736,4231895,4231896,4231804,4232185,4232213,6458362,4232587,4213141, 4232631,6458743,4213222,4232899,4218290,4232995,6459717,6459930,4213358,4213366,4233184,6460426,4213426,6460566,4233227,4233227, 4233257,6460854,4233442,4213555,4233513,4233639,4233666,4233726,4213710,6462256,4234002,4234304,4234493,4213966,4213997,4234599, 6463694,4214008,6463749,6464014,6464145,4234939,4214102,4235001,4235006,4235013,4235023,4235030,4235067,6465024,0,0 }; uint32_t cp_metadata_917505_917999[] = { 33554432,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432,33554432, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080,83886080, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 
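// NOTE: cp_metadata_917505_917999 appears to cover codepoints 917505..917999,
// i.e. U+E0001..U+E01EF (the Tags block and the Variation Selectors Supplement).
// The repeated constants look like packed per-codepoint flag words with the
// metadata in the high-order byte -- e.g. 83886080 == 0x05000000,
// 33554432 == 0x02000000, 50331680 == 0x03000020 -- but this decoding is
// inferred from the value patterns, not stated by the table generator.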
50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680, 50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680,50331680 }; aux_codepoint_data_type const aux_codepoint_data[] = { 0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
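// NOTE: aux_codepoint_data is almost entirely zero, with sparse 64-bit entries
// whose values fit the pattern (codepoint << 32) -- e.g. 10711648436224 ==
// 0x09BE << 32 and 12360915877888 == 0x0B3E << 32, both combining vowel signs.
// A plausible reading is that each nonzero entry carries the auxiliary (second)
// codepoint of a multi-codepoint normalization mapping packed into the upper
// 32 bits; that interpretation is an assumption drawn from the numbers alone.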
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,10711648436224,10819022618624,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,12360915877888,12468290060288,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,13018045874176,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,12910671691776,12910671691776,13018045874176,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,14108967567360,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,14559939133440,14559939133440,14667313315840,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,15182709391360,15182709391360,15251428868096,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,29914447216640,0,29914447216640,0,29914447216640,0,29914447216640,0,29914447216640,0,0, 0,29914447216640,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,29914447216640, 29914447216640}; uint64_t aux_cp_data_44032_55203[] = { 19108309499904, 19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439, 19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723, 19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739, 19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023, 19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039, 
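// NOTE: aux_cp_data_44032_55203 spans codepoints 44032..55203, i.e.
// U+AC00..U+D7A3, the precomposed Hangul Syllables block. The values are
// consistent with canonical (NFD) decomposition data with jamo codepoints
// packed into bit fields: the first entry 19108309499904 == 0x1161 << 32
// (HANGUL JUNGSEONG A), and consecutive entries step by trailing-consonant
// codes such as +0x11A8 (HANGUL JONGSEONG KIYEOK). This decoding is inferred
// from the numeric pattern, not documented by the generator.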
19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323, 19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088, 19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623, 19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907, 19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923, 19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207, 19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223, 19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507, 19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272, 19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807, 19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091, 19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107, 19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391, 19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407, 
19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691, 19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456, 19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991, 19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275, 19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291, 19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575, 19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591, 19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875, 19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640, 19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175, 19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459, 19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475, 19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759, 19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775, 
19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059, 19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824, 19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359, 19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427, 19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443, 19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727, 19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743, 19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027, 19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792, 19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327, 19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611, 19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627, 19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911, 19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927, 
19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211, 19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976, 19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511, 19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795, 19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811, 19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095, 19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111, 19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395, 19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160, 19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695, 19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979, 19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995, 19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279, 19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295, 
19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579, 19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344, 19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879, 19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163, 19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179, 19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463, 19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479, 19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763, 19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528, 19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063, 19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347, 19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363, 19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431, 19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447, 
19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731, 19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496, 19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031, 19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315, 19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331, 19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615, 19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631, 19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915, 19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680, 19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215, 19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499, 19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515, 19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799, 19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815, 
19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099, 19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864, 19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399, 19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683, 19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699, 19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983, 19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999, 19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283, 19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048, 19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583, 19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867, 19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883, 19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167, 19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183, 
19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467, 19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232, 19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767, 19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051, 19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067, 19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351, 19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367, 19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435, 19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200, 19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735, 19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019, 19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035, 19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319, 19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335, 
19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619, 19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384, 19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919, 19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203, 19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219, 19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503, 19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519, 19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803, 19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568, 19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103, 19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387, 19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403, 19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687, 19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703, 
19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987, 19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752, 19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287, 19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571, 19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587, 19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871, 19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887, 19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171, 19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936, 19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471, 19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755, 19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771, 19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055, 19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071, 
19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355, 19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904, 19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439, 19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723, 19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739, 19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023, 19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039, 19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323, 19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088, 19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623, 19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907, 19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923, 19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207, 19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223, 
19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507, 19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272, 19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807, 19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091, 19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107, 19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391, 19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407, 19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691, 19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456, 19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991, 19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275, 19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291, 19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575, 19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591, 
19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875, 19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640, 19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175, 19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459, 19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475, 19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759, 19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775, 19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059, 19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824, 19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359, 19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427, 19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443, 19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727, 19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743, 
19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027, 19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792, 19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327, 19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611, 19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627, 19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911, 19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927, 19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211, 19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976, 19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511, 19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795, 19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811, 19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095, 19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111, 
19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395, 19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160, 19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695, 19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979, 19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995, 19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279, 19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295, 19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579, 19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344, 19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879, 19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163, 19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179, 19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463, 19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479, 
19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763, 19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528, 19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063, 19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347, 19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363, 19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431, 19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447, 19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731, 19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496, 19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031, 19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315, 19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331, 19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615, 19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631, 
19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915, 19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680, 19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215, 19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499, 19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515, 19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799, 19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815, 19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099, 19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864, 19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399, 19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683, 19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699, 19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983, 19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999, 
19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283, 19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048, 19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583, 19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867, 19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883, 19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167, 19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183, 19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467, 19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232, 19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767, 19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051, 19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067, 19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351, 19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367, 
19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435, 19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200, 19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735, 19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019, 19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035, 19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319, 19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335, 19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619, 19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384, 19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919, 19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203, 19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219, 19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503, 19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519, 
19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803, 19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568, 19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103, 19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387, 19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403, 19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687, 19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703, 19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987, 19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752, 19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287, 19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571, 19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587, 19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871, 19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887, 
19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171, 19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936, 19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471, 19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755, 19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771, 19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055, 19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071, 19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355, 19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904, 19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439, 19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723, 19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739, 19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023, 19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039, 
19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323, 19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088, 19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623, 19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907, 19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923, 19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207, 19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223, 19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507, 19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272, 19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807, 19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091, 19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107, 19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391, 19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407, 
19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691, 19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456, 19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991, 19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275, 19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291, 19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575, 19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591, 19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875, 19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640, 19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175, 19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459, 19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475, 19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759, 19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775, 
19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059, 19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824, 19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359, 19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427, 19108309504428,19108309504429,19108309504430,19108309504431,19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443, 19108309504444,19108309504445,19108309504446,19108309504447,19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727, 19112604471728,19112604471729,19112604471730,19112604471731,19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743, 19112604471744,19112604471745,19112604471746,19116899434496,19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027, 19116899439028,19116899439029,19116899439030,19116899439031,19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792, 19121194406312,19121194406313,19121194406314,19121194406315,19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327, 19121194406328,19121194406329,19121194406330,19121194406331,19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611, 19125489373612,19125489373613,19125489373614,19125489373615,19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627, 19125489373628,19125489373629,19125489373630,19125489373631,19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911, 19129784340912,19129784340913,19129784340914,19129784340915,19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927, 
19129784340928,19129784340929,19129784340930,19134079303680,19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211, 19134079308212,19134079308213,19134079308214,19134079308215,19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976, 19138374275496,19138374275497,19138374275498,19138374275499,19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511, 19138374275512,19138374275513,19138374275514,19138374275515,19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795, 19142669242796,19142669242797,19142669242798,19142669242799,19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811, 19142669242812,19142669242813,19142669242814,19142669242815,19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095, 19146964210096,19146964210097,19146964210098,19146964210099,19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111, 19146964210112,19146964210113,19146964210114,19151259172864,19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395, 19151259177396,19151259177397,19151259177398,19151259177399,19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160, 19155554144680,19155554144681,19155554144682,19155554144683,19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695, 19155554144696,19155554144697,19155554144698,19155554144699,19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979, 19159849111980,19159849111981,19159849111982,19159849111983,19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995, 19159849111996,19159849111997,19159849111998,19159849111999,19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279, 19164144079280,19164144079281,19164144079282,19164144079283,19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295, 
19164144079296,19164144079297,19164144079298,19168439042048,19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579, 19168439046580,19168439046581,19168439046582,19168439046583,19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344, 19172734013864,19172734013865,19172734013866,19172734013867,19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879, 19172734013880,19172734013881,19172734013882,19172734013883,19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163, 19177028981164,19177028981165,19177028981166,19177028981167,19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179, 19177028981180,19177028981181,19177028981182,19177028981183,19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463, 19181323948464,19181323948465,19181323948466,19181323948467,19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479, 19181323948480,19181323948481,19181323948482,19185618911232,19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763, 19185618915764,19185618915765,19185618915766,19185618915767,19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528, 19189913883048,19189913883049,19189913883050,19189913883051,19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063, 19189913883064,19189913883065,19189913883066,19189913883067,19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347, 19194208850348,19194208850349,19194208850350,19194208850351,19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363, 19194208850364,19194208850365,19194208850366,19194208850367,19194208850368,19194208850369,19194208850370,19108309499904,19108309504424,19108309504425,19108309504426,19108309504427,19108309504428,19108309504429,19108309504430,19108309504431, 19108309504432,19108309504433,19108309504434,19108309504435,19108309504436,19108309504437,19108309504438,19108309504439,19108309504440,19108309504441,19108309504442,19108309504443,19108309504444,19108309504445,19108309504446,19108309504447, 
19108309504448,19108309504449,19108309504450,19112604467200,19112604471720,19112604471721,19112604471722,19112604471723,19112604471724,19112604471725,19112604471726,19112604471727,19112604471728,19112604471729,19112604471730,19112604471731, 19112604471732,19112604471733,19112604471734,19112604471735,19112604471736,19112604471737,19112604471738,19112604471739,19112604471740,19112604471741,19112604471742,19112604471743,19112604471744,19112604471745,19112604471746,19116899434496, 19116899439016,19116899439017,19116899439018,19116899439019,19116899439020,19116899439021,19116899439022,19116899439023,19116899439024,19116899439025,19116899439026,19116899439027,19116899439028,19116899439029,19116899439030,19116899439031, 19116899439032,19116899439033,19116899439034,19116899439035,19116899439036,19116899439037,19116899439038,19116899439039,19116899439040,19116899439041,19116899439042,19121194401792,19121194406312,19121194406313,19121194406314,19121194406315, 19121194406316,19121194406317,19121194406318,19121194406319,19121194406320,19121194406321,19121194406322,19121194406323,19121194406324,19121194406325,19121194406326,19121194406327,19121194406328,19121194406329,19121194406330,19121194406331, 19121194406332,19121194406333,19121194406334,19121194406335,19121194406336,19121194406337,19121194406338,19125489369088,19125489373608,19125489373609,19125489373610,19125489373611,19125489373612,19125489373613,19125489373614,19125489373615, 19125489373616,19125489373617,19125489373618,19125489373619,19125489373620,19125489373621,19125489373622,19125489373623,19125489373624,19125489373625,19125489373626,19125489373627,19125489373628,19125489373629,19125489373630,19125489373631, 19125489373632,19125489373633,19125489373634,19129784336384,19129784340904,19129784340905,19129784340906,19129784340907,19129784340908,19129784340909,19129784340910,19129784340911,19129784340912,19129784340913,19129784340914,19129784340915, 19129784340916,19129784340917,19129784340918,19129784340919,19129784340920,19129784340921,19129784340922,19129784340923,19129784340924,19129784340925,19129784340926,19129784340927,19129784340928,19129784340929,19129784340930,19134079303680, 19134079308200,19134079308201,19134079308202,19134079308203,19134079308204,19134079308205,19134079308206,19134079308207,19134079308208,19134079308209,19134079308210,19134079308211,19134079308212,19134079308213,19134079308214,19134079308215, 19134079308216,19134079308217,19134079308218,19134079308219,19134079308220,19134079308221,19134079308222,19134079308223,19134079308224,19134079308225,19134079308226,19138374270976,19138374275496,19138374275497,19138374275498,19138374275499, 19138374275500,19138374275501,19138374275502,19138374275503,19138374275504,19138374275505,19138374275506,19138374275507,19138374275508,19138374275509,19138374275510,19138374275511,19138374275512,19138374275513,19138374275514,19138374275515, 19138374275516,19138374275517,19138374275518,19138374275519,19138374275520,19138374275521,19138374275522,19142669238272,19142669242792,19142669242793,19142669242794,19142669242795,19142669242796,19142669242797,19142669242798,19142669242799, 19142669242800,19142669242801,19142669242802,19142669242803,19142669242804,19142669242805,19142669242806,19142669242807,19142669242808,19142669242809,19142669242810,19142669242811,19142669242812,19142669242813,19142669242814,19142669242815, 
19142669242816,19142669242817,19142669242818,19146964205568,19146964210088,19146964210089,19146964210090,19146964210091,19146964210092,19146964210093,19146964210094,19146964210095,19146964210096,19146964210097,19146964210098,19146964210099, 19146964210100,19146964210101,19146964210102,19146964210103,19146964210104,19146964210105,19146964210106,19146964210107,19146964210108,19146964210109,19146964210110,19146964210111,19146964210112,19146964210113,19146964210114,19151259172864, 19151259177384,19151259177385,19151259177386,19151259177387,19151259177388,19151259177389,19151259177390,19151259177391,19151259177392,19151259177393,19151259177394,19151259177395,19151259177396,19151259177397,19151259177398,19151259177399, 19151259177400,19151259177401,19151259177402,19151259177403,19151259177404,19151259177405,19151259177406,19151259177407,19151259177408,19151259177409,19151259177410,19155554140160,19155554144680,19155554144681,19155554144682,19155554144683, 19155554144684,19155554144685,19155554144686,19155554144687,19155554144688,19155554144689,19155554144690,19155554144691,19155554144692,19155554144693,19155554144694,19155554144695,19155554144696,19155554144697,19155554144698,19155554144699, 19155554144700,19155554144701,19155554144702,19155554144703,19155554144704,19155554144705,19155554144706,19159849107456,19159849111976,19159849111977,19159849111978,19159849111979,19159849111980,19159849111981,19159849111982,19159849111983, 19159849111984,19159849111985,19159849111986,19159849111987,19159849111988,19159849111989,19159849111990,19159849111991,19159849111992,19159849111993,19159849111994,19159849111995,19159849111996,19159849111997,19159849111998,19159849111999, 19159849112000,19159849112001,19159849112002,19164144074752,19164144079272,19164144079273,19164144079274,19164144079275,19164144079276,19164144079277,19164144079278,19164144079279,19164144079280,19164144079281,19164144079282,19164144079283, 19164144079284,19164144079285,19164144079286,19164144079287,19164144079288,19164144079289,19164144079290,19164144079291,19164144079292,19164144079293,19164144079294,19164144079295,19164144079296,19164144079297,19164144079298,19168439042048, 19168439046568,19168439046569,19168439046570,19168439046571,19168439046572,19168439046573,19168439046574,19168439046575,19168439046576,19168439046577,19168439046578,19168439046579,19168439046580,19168439046581,19168439046582,19168439046583, 19168439046584,19168439046585,19168439046586,19168439046587,19168439046588,19168439046589,19168439046590,19168439046591,19168439046592,19168439046593,19168439046594,19172734009344,19172734013864,19172734013865,19172734013866,19172734013867, 19172734013868,19172734013869,19172734013870,19172734013871,19172734013872,19172734013873,19172734013874,19172734013875,19172734013876,19172734013877,19172734013878,19172734013879,19172734013880,19172734013881,19172734013882,19172734013883, 19172734013884,19172734013885,19172734013886,19172734013887,19172734013888,19172734013889,19172734013890,19177028976640,19177028981160,19177028981161,19177028981162,19177028981163,19177028981164,19177028981165,19177028981166,19177028981167, 19177028981168,19177028981169,19177028981170,19177028981171,19177028981172,19177028981173,19177028981174,19177028981175,19177028981176,19177028981177,19177028981178,19177028981179,19177028981180,19177028981181,19177028981182,19177028981183, 
19177028981184,19177028981185,19177028981186,19181323943936,19181323948456,19181323948457,19181323948458,19181323948459,19181323948460,19181323948461,19181323948462,19181323948463,19181323948464,19181323948465,19181323948466,19181323948467, 19181323948468,19181323948469,19181323948470,19181323948471,19181323948472,19181323948473,19181323948474,19181323948475,19181323948476,19181323948477,19181323948478,19181323948479,19181323948480,19181323948481,19181323948482,19185618911232, 19185618915752,19185618915753,19185618915754,19185618915755,19185618915756,19185618915757,19185618915758,19185618915759,19185618915760,19185618915761,19185618915762,19185618915763,19185618915764,19185618915765,19185618915766,19185618915767, 19185618915768,19185618915769,19185618915770,19185618915771,19185618915772,19185618915773,19185618915774,19185618915775,19185618915776,19185618915777,19185618915778,19189913878528,19189913883048,19189913883049,19189913883050,19189913883051, 19189913883052,19189913883053,19189913883054,19189913883055,19189913883056,19189913883057,19189913883058,19189913883059,19189913883060,19189913883061,19189913883062,19189913883063,19189913883064,19189913883065,19189913883066,19189913883067, 19189913883068,19189913883069,19189913883070,19189913883071,19189913883072,19189913883073,19189913883074,19194208845824,19194208850344,19194208850345,19194208850346,19194208850347,19194208850348,19194208850349,19194208850350,19194208850351, 19194208850352,19194208850353,19194208850354,19194208850355,19194208850356,19194208850357,19194208850358,19194208850359,19194208850360,19194208850361,19194208850362,19194208850363,19194208850364,19194208850365,19194208850366,19194208850367, 19194208850368,19194208850369,19194208850370}; uint64_t aux_cp_data_70475_71099[] = { 302631985610752,302739359793152,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,304221123510272,0,304276958085120,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,305316340170752,305316340170752}; uint64_t aux_cp_data_119134_119232[] = { 511706698612736,511706698612736,511706698731886, 511706698731887,511706698731888,511706698731889,511706698731890,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,511706698612736,511706698612736,511706698731886,511706698731886,511706698731887,511706698731887 };
0
rapidsai_public_repos/cudf/cpp/src/text/subword
rapidsai_public_repos/cudf/cpp/src/text/subword/detail/data_normalizer.hpp
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <text/subword/detail/cp_data.h> #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <cstdint> #include <memory> #include <utility> using uvector_pair = std::pair<std::unique_ptr<rmm::device_uvector<uint32_t>>, std::unique_ptr<rmm::device_uvector<cudf::size_type>>>; namespace nvtext { namespace detail { /** * @brief Performs text cleaning for the tokenizers. * * Every instantiation of this class will transfer the metadata over to the GPU. * It is advised to create one instance of this class and reuse it as needed. * * Converts characters to lowercase, adds spaces around punctuation and multi-byte * characters, strips accents from letters in the text and standardizes whitespace * characters to all be the code point for the " " literal. * * The algorithm produces two vectors of integers as a `uvector_pair`. * The first is sized at 3 uint32 values per input byte (of the strings buffer). * The second is the same size as the input offsets vector -- number of strings + 1. * * A temporary buffer is created equal to 1 uint32 value per input byte. * This means 16x the number of bytes of the input strings buffer must be available * to call the `normalize()` function in this class. */ class data_normalizer { public: /** * @brief Create an instance of the normalizer. * * @param cp_metadata The code point metadata table to use for normalization. * @param aux_table The auxiliary code point table. * @param do_lower_case If true, the normalizer will convert uppercase characters in the * input stream to lower case and strip accents from those characters. * If false, accented and uppercase characters are not transformed. */ data_normalizer(codepoint_metadata_type const* cp_metadata, aux_codepoint_data_type const* aux_table, bool do_lower_case = true); /** * @brief Normalize a vector of strings. * * If `do_lower_case` is true, this function will convert each character to lowercase * and strip accents from the characters. If false, it will do all other conversions * in the class description except lower-casing and accent stripping. * * The result of this function returns two pointers to GPU data. * The first pointer is to a contiguous array of Unicode code points corresponding to the * characters in the text after running normalization. The second pointer is to the * offsets of the strings in the code point array. That is, string `i` starts at * `result.second->data()[i]`. * This offsets array will always be of length `num_strings + 1` since we need one entry * for each input string and a last entry which holds the total number of code points. * * @param d_strings A vector of strings which MUST be encoded in the UTF-8 format. * @param d_offsets A vector of byte offsets to the beginning of individual strings in * the `d_strings` parameter. * @param num_strings The number of strings identified in `d_strings`. * @param stream CUDA stream used for device memory operations and kernel launches. * @return Two pointers to GPU data buffers. 
The first is a pointer * to the code points array and the second is a pointer to the offsets * used to locate the code points for each string. */ uvector_pair normalize(char const* d_strings, cudf::size_type const* d_offsets, cudf::size_type num_strings, rmm::cuda_stream_view stream) const; private: bool const do_lower_case; codepoint_metadata_type const* d_cp_metadata; aux_codepoint_data_type const* d_aux_table; }; } // namespace detail } // namespace nvtext
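For orientation, here is a minimal host-side sketch of how the normalizer above might be driven. It is illustrative only: the device pointers d_cp_metadata and d_aux_table are assumed to already reference the code point tables resident on the GPU (populating them is outside this header), and d_strings / d_offsets / num_strings are assumed to describe a UTF-8 strings buffer as documented for normalize().

// Hypothetical usage sketch -- not part of the header above.
nvtext::detail::data_normalizer normalizer(d_cp_metadata, d_aux_table, /*do_lower_case=*/true);
uvector_pair result = normalizer.normalize(d_strings, d_offsets, num_strings, cudf::get_default_stream());
// result.first  : contiguous code points for all normalized strings
// result.second : num_strings + 1 offsets; string i occupies
//                 [(*result.second)[i], (*result.second)[i+1]) within result.first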
0
rapidsai_public_repos/cudf/cpp/src/text
rapidsai_public_repos/cudf/cpp/src/text/utilities/tokenize_ops.cuh
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/column/column_device_view.cuh> #include <cudf/strings/detail/utf8.hpp> #include <cudf/strings/string_view.cuh> #include <thrust/execution_policy.h> #include <thrust/find.h> #include <thrust/logical.h> #include <thrust/pair.h> namespace nvtext { namespace detail { using string_index_pair = thrust::pair<char const*, cudf::size_type>; using position_pair = thrust::pair<cudf::size_type, cudf::size_type>; /** * @brief Tokenizer class that uses multi-character delimiters. * * This is common code for the tokenize, token-count, and normalize functions. * If an empty delimiter string is specified, then whitespace * (code-point <= ' ') is used to identify tokens. * * After instantiating this object, use the `next_token()` method * to parse tokens and the `token_byte_positions()` method to retrieve the * current token's byte offsets within the string. */ struct characters_tokenizer { /** * @brief Constructor for characters_tokenizer. * * @param d_str The string to tokenize. * @param d_delimiter The (optional) delimiter characters used to locate tokens. */ __device__ characters_tokenizer(cudf::string_view const& d_str, cudf::string_view const& d_delimiter = cudf::string_view{}) : d_str{d_str}, d_delimiter{d_delimiter}, spaces{true}, current_position{0}, start_position(0), end_position(d_str.size_bytes()) { } /** * @brief Return true if the given character is a delimiter. * * For an empty delimiter, a whitespace check is used instead. * * @param chr The character to test. * @return true if the character is a delimiter */ __device__ bool is_delimiter(cudf::char_utf8 chr) const { return d_delimiter.empty() ? (chr <= ' ') : // whitespace check thrust::any_of(thrust::seq, d_delimiter.begin(), d_delimiter.end(), [chr] __device__(cudf::char_utf8 c) { return c == chr; }); } /** * @brief Identifies the bounds of the next token in the given * string at the specified iterator position. * * For an empty delimiter, a whitespace check is used instead. * Starting at current_position, the token's start position is identified * at the first non-delimiter character. Once the start is found, the end * position is identified at the next delimiter or the end of the string. 
* * @return true if a token has been found */ __device__ bool next_token() { auto const src_ptr = d_str.data(); if (current_position >= d_str.size_bytes()) { return false; } if (current_position != 0) { // skip these 2 lines the first time through current_position += cudf::strings::detail::bytes_in_char_utf8(src_ptr[current_position]); start_position = current_position; } if (start_position >= d_str.size_bytes()) { return false; } // continue search for the next token end_position = d_str.size_bytes(); while (current_position < d_str.size_bytes()) { cudf::char_utf8 ch = 0; auto const chr_width = cudf::strings::detail::to_char_utf8(src_ptr + current_position, ch); if (spaces == is_delimiter(ch)) { current_position += chr_width; if (spaces) { start_position = current_position; } else { end_position = current_position; } continue; } spaces = !spaces; if (spaces) { end_position = current_position; break; } current_position += chr_width; } return start_position < end_position; } /** * @brief Returns the byte offsets for the current token * within this string. * * @return Byte positions of the current token. */ __device__ position_pair token_byte_positions() const { return position_pair{start_position, end_position}; } private: cudf::string_view const d_str; ///< string to tokenize cudf::string_view const d_delimiter; ///< delimiter characters bool spaces; ///< true if current position is delimiter cudf::size_type current_position; ///< current position in d_str cudf::size_type start_position; ///< starting byte position of token found cudf::size_type end_position; ///< ending byte position (exclusive) of token found }; /** * @brief Tokenizing function for multi-character delimiter. * * The first pass simply counts the tokens so the size of the output * vector can be calculated. The second pass places the token * positions into the d_tokens vector. */ struct strings_tokenizer { cudf::column_device_view const d_strings; ///< strings to tokenize cudf::string_view const d_delimiter; ///< delimiter characters to tokenize around cudf::size_type* d_offsets{}; ///< offsets into the d_tokens vector for each string string_index_pair* d_tokens{}; ///< token positions in device memory /** * @brief Identifies the token positions within each string. * * This counts the tokens in each string and also places the token positions * into the d_tokens member. * * @param idx Index of the string to tokenize in the d_strings column. * @return The number of tokens for this string. */ __device__ cudf::size_type operator()(cudf::size_type idx) { if (d_strings.is_null(idx)) return 0; auto d_str = d_strings.element<cudf::string_view>(idx); // create tokenizer for this string characters_tokenizer tokenizer(d_str, d_delimiter); string_index_pair* d_str_tokens = d_tokens ? d_tokens + d_offsets[idx] : nullptr; cudf::size_type token_idx = 0; while (tokenizer.next_token()) { if (d_str_tokens) { auto token_pos = tokenizer.token_byte_positions(); d_str_tokens[token_idx] = string_index_pair{d_str.data() + token_pos.first, (token_pos.second - token_pos.first)}; } ++token_idx; } return token_idx; // number of tokens found } }; // delimiters' iterator = delimiterator using delimiterator = cudf::column_device_view::const_iterator<cudf::string_view>; /** * @brief Tokenizes strings using multiple string delimiters. * * One or more strings are used as delimiters to identify tokens inside * each string of a given strings column. 
*/ struct multi_delimiter_strings_tokenizer { cudf::column_device_view const d_strings; ///< strings column to tokenize delimiterator delimiters_begin; ///< first delimiter delimiterator delimiters_end; ///< last delimiter cudf::size_type* d_offsets{}; ///< offsets into the d_tokens output vector string_index_pair* d_tokens{}; ///< token positions found for each string /** * @brief Identifies the token positions within each string. * * This counts the tokens in each string and also places the token positions * into the d_tokens member. * * @param idx Index of the string to tokenize in the d_strings column. * @return The number of tokens for this string. */ __device__ cudf::size_type operator()(cudf::size_type idx) { if (d_strings.is_null(idx)) return 0; cudf::string_view d_str = d_strings.element<cudf::string_view>(idx); auto d_str_tokens = d_tokens ? d_tokens + d_offsets[idx] : nullptr; auto data_ptr = d_str.data(); cudf::size_type last_pos = 0; cudf::size_type token_idx = 0; // check for delimiters at each character position for (auto itr = d_str.begin(); itr != d_str.end(); ++itr) { auto curr_ptr = data_ptr + itr.byte_offset(); cudf::string_view sub_str( curr_ptr, static_cast<cudf::size_type>(data_ptr + d_str.size_bytes() - curr_ptr)); // look for delimiter at current position auto itr_find = thrust::find_if( thrust::seq, delimiters_begin, delimiters_end, [sub_str] __device__(cudf::string_view const& d_delim) { return !d_delim.empty() && (d_delim.size_bytes() <= sub_str.size_bytes()) && d_delim.compare(sub_str.data(), d_delim.size_bytes()) == 0; }); if (itr_find != delimiters_end) { // found delimiter auto token_size = static_cast<cudf::size_type>((curr_ptr - data_ptr) - last_pos); if (token_size > 0) // we only care about non-zero sized tokens { if (d_str_tokens) d_str_tokens[token_idx] = string_index_pair{data_ptr + last_pos, token_size}; ++token_idx; } last_pos = (curr_ptr - data_ptr) + (*itr_find).size_bytes(); // point past delimiter itr += (*itr_find).length() - 1; } } if (last_pos < d_str.size_bytes()) // left-over tokens { if (d_str_tokens) d_str_tokens[token_idx] = string_index_pair{data_ptr + last_pos, d_str.size_bytes() - last_pos}; ++token_idx; } return token_idx; // this is the number of tokens found for this string } }; } // namespace detail } // namespace nvtext
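To make the next_token() / token_byte_positions() protocol above concrete, a minimal device-side token counter could be written as follows. This functor is a hypothetical illustration (the name token_counter is not part of this header); it relies only on the characters_tokenizer API declared above.

// Hypothetical illustration -- counts the tokens in one string on the device.
struct token_counter {
  cudf::string_view d_delimiter;  // an empty view selects whitespace delimiting
  __device__ cudf::size_type operator()(cudf::string_view const& d_str) const
  {
    nvtext::detail::characters_tokenizer tokenizer(d_str, d_delimiter);
    cudf::size_type count = 0;
    while (tokenizer.next_token()) { ++count; }  // each call advances to the next token
    return count;
  }
};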
0
rapidsai_public_repos/cudf/cpp/src
rapidsai_public_repos/cudf/cpp/src/io/functions.cpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <io/orc/orc.hpp> #include <cudf/detail/iterator.cuh> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/io/avro.hpp> #include <cudf/io/csv.hpp> #include <cudf/io/data_sink.hpp> #include <cudf/io/datasource.hpp> #include <cudf/io/detail/avro.hpp> #include <cudf/io/detail/csv.hpp> #include <cudf/io/detail/json.hpp> #include <cudf/io/detail/orc.hpp> #include <cudf/io/detail/parquet.hpp> #include <cudf/io/json.hpp> #include <cudf/io/orc.hpp> #include <cudf/io/orc_metadata.hpp> #include <cudf/io/parquet.hpp> #include <cudf/io/parquet_metadata.hpp> #include <cudf/table/table.hpp> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <algorithm> namespace cudf { namespace io { // Returns builder for csv_reader_options csv_reader_options_builder csv_reader_options::builder(source_info src) { return csv_reader_options_builder{std::move(src)}; } // Returns builder for csv_writer_options csv_writer_options_builder csv_writer_options::builder(sink_info const& sink, table_view const& table) { return csv_writer_options_builder{sink, table}; } // Returns builder for orc_reader_options orc_reader_options_builder orc_reader_options::builder(source_info src) { return orc_reader_options_builder{std::move(src)}; } // Returns builder for orc_writer_options orc_writer_options_builder orc_writer_options::builder(sink_info const& sink, table_view const& table) { return orc_writer_options_builder{sink, table}; } // Returns builder for chunked_orc_writer_options chunked_orc_writer_options_builder chunked_orc_writer_options::builder(sink_info const& sink) { return chunked_orc_writer_options_builder{sink}; } // Returns builder for avro_reader_options avro_reader_options_builder avro_reader_options::builder(source_info src) { return avro_reader_options_builder(std::move(src)); } // Returns builder for json_reader_options json_reader_options_builder json_reader_options::builder(source_info src) { return json_reader_options_builder(std::move(src)); } // Returns builder for json_writer_options json_writer_options_builder json_writer_options::builder(sink_info const& sink, table_view const& table) { return json_writer_options_builder{sink, table}; } // Returns builder for parquet_reader_options parquet_reader_options_builder parquet_reader_options::builder(source_info src) { return parquet_reader_options_builder{std::move(src)}; } // Returns builder for parquet_writer_options parquet_writer_options_builder parquet_writer_options::builder(sink_info const& sink, table_view const& table) { return parquet_writer_options_builder{sink, table}; } // Returns builder for parquet_writer_options parquet_writer_options_builder parquet_writer_options::builder() { return parquet_writer_options_builder(); } // Returns builder for chunked_parquet_writer_options chunked_parquet_writer_options_builder chunked_parquet_writer_options::builder( sink_info const& sink) { return 
chunked_parquet_writer_options_builder(sink); } namespace { std::vector<std::unique_ptr<cudf::io::datasource>> make_datasources(source_info const& info, size_t range_offset = 0, size_t range_size = 0) { switch (info.type()) { case io_type::FILEPATH: { auto sources = std::vector<std::unique_ptr<cudf::io::datasource>>(); for (auto const& filepath : info.filepaths()) { sources.emplace_back(cudf::io::datasource::create(filepath, range_offset, range_size)); } return sources; } case io_type::HOST_BUFFER: return cudf::io::datasource::create(info.host_buffers()); case io_type::DEVICE_BUFFER: return cudf::io::datasource::create(info.device_buffers()); case io_type::USER_IMPLEMENTED: return cudf::io::datasource::create(info.user_sources()); default: CUDF_FAIL("Unsupported source type"); } } std::vector<std::unique_ptr<data_sink>> make_datasinks(sink_info const& info) { switch (info.type()) { case io_type::FILEPATH: return cudf::io::data_sink::create(info.filepaths()); case io_type::HOST_BUFFER: return cudf::io::data_sink::create(info.buffers()); case io_type::VOID: { std::vector<std::unique_ptr<data_sink>> sinks; for (size_t i = 0; i < info.num_sinks(); ++i) { sinks.push_back(cudf::io::data_sink::create()); } return sinks; } case io_type::USER_IMPLEMENTED: return cudf::io::data_sink::create(info.user_sinks()); default: CUDF_FAIL("Unsupported sink type"); } } } // namespace table_with_metadata read_avro(avro_reader_options const& options, rmm::mr::device_memory_resource* mr) { namespace avro = cudf::io::detail::avro; CUDF_FUNC_RANGE(); auto datasources = make_datasources(options.get_source()); CUDF_EXPECTS(datasources.size() == 1, "Only a single source is currently supported."); return avro::read_avro(std::move(datasources[0]), options, cudf::get_default_stream(), mr); } compression_type infer_compression_type(compression_type compression, source_info const& info) { if (compression != compression_type::AUTO) { return compression; } if (info.type() != io_type::FILEPATH) { return compression_type::NONE; } auto filepath = info.filepaths()[0]; // Attempt to infer from the file extension auto const pos = filepath.find_last_of('.'); if (pos == std::string::npos) { return {}; } auto str_tolower = [](auto const& begin, auto const& end) { std::string out; std::transform(begin, end, std::back_inserter(out), ::tolower); return out; }; auto const ext = str_tolower(filepath.begin() + pos + 1, filepath.end()); if (ext == "gz") { return compression_type::GZIP; } if (ext == "zip") { return compression_type::ZIP; } if (ext == "bz2") { return compression_type::BZIP2; } if (ext == "xz") { return compression_type::XZ; } return compression_type::NONE; } table_with_metadata read_json(json_reader_options options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); options.set_compression(infer_compression_type(options.get_compression(), options.get_source())); auto datasources = make_datasources(options.get_source(), options.get_byte_range_offset(), options.get_byte_range_size_with_padding()); return json::detail::read_json(datasources, options, stream, mr); } void write_json(json_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto sinks = make_datasinks(options.get_sink()); CUDF_EXPECTS(sinks.size() == 1, "Multiple sinks not supported for JSON writing"); return json::detail::write_json( // sinks[0].get(), options.get_table(), options, stream, mr); } table_with_metadata read_csv(csv_reader_options options, rmm::cuda_stream_view stream, 
rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); options.set_compression(infer_compression_type(options.get_compression(), options.get_source())); auto datasources = make_datasources(options.get_source(), options.get_byte_range_offset(), options.get_byte_range_size_with_padding()); CUDF_EXPECTS(datasources.size() == 1, "Only a single source is currently supported."); return cudf::io::detail::csv::read_csv( // std::move(datasources[0]), options, stream, mr); } // Freeform API wraps the detail writer class API void write_csv(csv_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { using namespace cudf::io::detail; auto sinks = make_datasinks(options.get_sink()); CUDF_EXPECTS(sinks.size() == 1, "Multiple sinks not supported for CSV writing"); return csv::write_csv( // sinks[0].get(), options.get_table(), options.get_names(), options, stream, mr); } namespace detail_orc = cudf::io::detail::orc; raw_orc_statistics read_raw_orc_statistics(source_info const& src_info) { auto stream = cudf::get_default_stream(); // Get source to read statistics from std::unique_ptr<datasource> source; if (src_info.type() == io_type::FILEPATH) { CUDF_EXPECTS(src_info.filepaths().size() == 1, "Only a single source is currently supported."); source = cudf::io::datasource::create(src_info.filepaths()[0]); } else if (src_info.type() == io_type::HOST_BUFFER) { CUDF_EXPECTS(src_info.host_buffers().size() == 1, "Only a single source is currently supported."); source = cudf::io::datasource::create(src_info.host_buffers()[0]); } else if (src_info.type() == io_type::DEVICE_BUFFER) { CUDF_EXPECTS(src_info.device_buffers().size() == 1, "Only a single source is currently supported."); source = cudf::io::datasource::create(src_info.device_buffers()[0]); } else if (src_info.type() == io_type::USER_IMPLEMENTED) { CUDF_EXPECTS(src_info.user_sources().size() == 1, "Only a single source is currently supported."); source = cudf::io::datasource::create(src_info.user_sources()[0]); } else { CUDF_FAIL("Unsupported source type"); } orc::metadata metadata(source.get(), stream); // Initialize statistics to return raw_orc_statistics result; // Get column names for (auto i = 0; i < metadata.get_num_columns(); i++) { result.column_names.push_back(metadata.column_name(i)); } // Get file-level statistics, statistics of each column of file for (auto const& stats : metadata.ff.statistics) { result.file_stats.push_back(std::string(stats.cbegin(), stats.cend())); } // Get stripe-level statistics for (auto const& stripes_stats : metadata.md.stripeStats) { result.stripes_stats.emplace_back(); for (auto const& stats : stripes_stats.colStats) { result.stripes_stats.back().push_back(std::string(stats.cbegin(), stats.cend())); } } return result; } column_statistics::column_statistics(cudf::io::orc::column_statistics&& cs) { number_of_values = cs.number_of_values; has_null = cs.has_null; if (cs.int_stats) { type_specific_stats = *cs.int_stats; } else if (cs.double_stats) { type_specific_stats = *cs.double_stats; } else if (cs.string_stats) { type_specific_stats = *cs.string_stats; } else if (cs.bucket_stats) { type_specific_stats = *cs.bucket_stats; } else if (cs.decimal_stats) { type_specific_stats = *cs.decimal_stats; } else if (cs.date_stats) { type_specific_stats = *cs.date_stats; } else if (cs.binary_stats) { type_specific_stats = *cs.binary_stats; } else if (cs.timestamp_stats) { type_specific_stats = *cs.timestamp_stats; } } parsed_orc_statistics read_parsed_orc_statistics(source_info const& 
src_info) { auto const raw_stats = read_raw_orc_statistics(src_info); parsed_orc_statistics result; result.column_names = raw_stats.column_names; auto parse_column_statistics = [](auto const& raw_col_stats) { orc::column_statistics stats_internal; orc::ProtobufReader(reinterpret_cast<uint8_t const*>(raw_col_stats.c_str()), raw_col_stats.size()) .read(stats_internal); return column_statistics(std::move(stats_internal)); }; std::transform(raw_stats.file_stats.cbegin(), raw_stats.file_stats.cend(), std::back_inserter(result.file_stats), parse_column_statistics); for (auto const& raw_stripe_stats : raw_stats.stripes_stats) { result.stripes_stats.emplace_back(); std::transform(raw_stripe_stats.cbegin(), raw_stripe_stats.cend(), std::back_inserter(result.stripes_stats.back()), parse_column_statistics); } return result; } namespace { orc_column_schema make_orc_column_schema(host_span<orc::SchemaType const> orc_schema, uint32_t column_id, std::string column_name) { auto const& orc_col_schema = orc_schema[column_id]; std::vector<orc_column_schema> children; children.reserve(orc_col_schema.subtypes.size()); std::transform( orc_col_schema.subtypes.cbegin(), orc_col_schema.subtypes.cend(), cudf::detail::make_counting_transform_iterator(0, [&names = orc_col_schema.fieldNames](size_t i) { return i < names.size() ? names[i] : std::string{}; }), std::back_inserter(children), [&](auto& type, auto name) { return make_orc_column_schema(orc_schema, type, name); }); return {std::move(column_name), orc_schema[column_id].kind, std::move(children)}; } }; // namespace orc_metadata read_orc_metadata(source_info const& src_info) { auto sources = make_datasources(src_info); CUDF_EXPECTS(sources.size() == 1, "Only a single source is currently supported."); auto const footer = orc::metadata(sources.front().get(), cudf::detail::default_stream_value).ff; return {{make_orc_column_schema(footer.types, 0, "")}, static_cast<size_type>(footer.numberOfRows), static_cast<size_type>(footer.stripes.size())}; } /** * @copydoc cudf::io::read_orc */ table_with_metadata read_orc(orc_reader_options const& options, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto datasources = make_datasources(options.get_source()); auto reader = std::make_unique<detail_orc::reader>( std::move(datasources), options, cudf::get_default_stream(), mr); return reader->read(options); } /** * @copydoc cudf::io::write_orc */ void write_orc(orc_writer_options const& options) { namespace io_detail = cudf::io::detail; CUDF_FUNC_RANGE(); auto sinks = make_datasinks(options.get_sink()); CUDF_EXPECTS(sinks.size() == 1, "Multiple sinks not supported for ORC writing"); auto writer = std::make_unique<detail_orc::writer>( std::move(sinks[0]), options, io_detail::single_write_mode::YES, cudf::get_default_stream()); writer->write(options.get_table()); } /** * @copydoc cudf::io::orc_chunked_writer::orc_chunked_writer */ orc_chunked_writer::orc_chunked_writer(chunked_orc_writer_options const& options) { namespace io_detail = cudf::io::detail; auto sinks = make_datasinks(options.get_sink()); CUDF_EXPECTS(sinks.size() == 1, "Multiple sinks not supported for ORC writing"); writer = std::make_unique<detail_orc::writer>( std::move(sinks[0]), options, io_detail::single_write_mode::NO, cudf::get_default_stream()); } /** * @copydoc cudf::io::orc_chunked_writer::write */ orc_chunked_writer& orc_chunked_writer::write(table_view const& table) { CUDF_FUNC_RANGE(); writer->write(table); return *this; } /** * @copydoc cudf::io::orc_chunked_writer::close */ void 
orc_chunked_writer::close() { CUDF_FUNC_RANGE(); writer->close(); } using namespace cudf::io::parquet::detail; namespace detail_parquet = cudf::io::parquet::detail; table_with_metadata read_parquet(parquet_reader_options const& options, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto datasources = make_datasources(options.get_source()); auto reader = std::make_unique<detail_parquet::reader>( std::move(datasources), options, cudf::get_default_stream(), mr); return reader->read(options); } parquet_metadata read_parquet_metadata(source_info const& src_info) { CUDF_FUNC_RANGE(); auto datasources = make_datasources(src_info); return detail_parquet::read_parquet_metadata(datasources); } /** * @copydoc cudf::io::merge_row_group_metadata */ std::unique_ptr<std::vector<uint8_t>> merge_row_group_metadata( std::vector<std::unique_ptr<std::vector<uint8_t>>> const& metadata_list) { CUDF_FUNC_RANGE(); return detail_parquet::writer::merge_row_group_metadata(metadata_list); } table_input_metadata::table_input_metadata(table_view const& table) { // Create a metadata hierarchy using `table` std::function<column_in_metadata(column_view const&)> get_children = [&](column_view const& col) { auto col_meta = column_in_metadata{}; std::transform( col.child_begin(), col.child_end(), std::back_inserter(col_meta.children), get_children); return col_meta; }; std::transform( table.begin(), table.end(), std::back_inserter(this->column_metadata), get_children); } table_input_metadata::table_input_metadata(table_metadata const& metadata) { auto const& names = metadata.schema_info; // Create a metadata hierarchy with naming and nullability using `table_metadata` std::function<column_in_metadata(column_name_info const&)> process_node = [&](column_name_info const& name) { auto col_meta = column_in_metadata{name.name}; if (name.is_nullable.has_value()) { col_meta.set_nullability(name.is_nullable.value()); } std::transform(name.children.begin(), name.children.end(), std::back_inserter(col_meta.children), process_node); return col_meta; }; std::transform( names.begin(), names.end(), std::back_inserter(this->column_metadata), process_node); } /** * @copydoc cudf::io::write_parquet */ std::unique_ptr<std::vector<uint8_t>> write_parquet(parquet_writer_options const& options) { namespace io_detail = cudf::io::detail; CUDF_FUNC_RANGE(); auto sinks = make_datasinks(options.get_sink()); auto writer = std::make_unique<detail_parquet::writer>( std::move(sinks), options, io_detail::single_write_mode::YES, cudf::get_default_stream()); writer->write(options.get_table(), options.get_partitions()); return writer->close(options.get_column_chunks_file_paths()); } /** * @copydoc cudf::io::chunked_parquet_reader::chunked_parquet_reader */ chunked_parquet_reader::chunked_parquet_reader(std::size_t chunk_read_limit, parquet_reader_options const& options, rmm::mr::device_memory_resource* mr) : reader{std::make_unique<detail_parquet::chunked_reader>(chunk_read_limit, 0, make_datasources(options.get_source()), options, cudf::get_default_stream(), mr)} { } /** * @copydoc cudf::io::chunked_parquet_reader::chunked_parquet_reader */ chunked_parquet_reader::chunked_parquet_reader(std::size_t chunk_read_limit, std::size_t pass_read_limit, parquet_reader_options const& options, rmm::mr::device_memory_resource* mr) : reader{std::make_unique<detail_parquet::chunked_reader>(chunk_read_limit, pass_read_limit, make_datasources(options.get_source()), options, cudf::get_default_stream(), mr)} { } /** * @copydoc 
cudf::io::chunked_parquet_reader::~chunked_parquet_reader */ chunked_parquet_reader::~chunked_parquet_reader() = default; /** * @copydoc cudf::io::chunked_parquet_reader::has_next */ bool chunked_parquet_reader::has_next() const { CUDF_FUNC_RANGE(); CUDF_EXPECTS(reader != nullptr, "Reader has not been constructed properly."); return reader->has_next(); } /** * @copydoc cudf::io::chunked_parquet_reader::read_chunk */ table_with_metadata chunked_parquet_reader::read_chunk() const { CUDF_FUNC_RANGE(); CUDF_EXPECTS(reader != nullptr, "Reader has not been constructed properly."); return reader->read_chunk(); } /** * @copydoc cudf::io::parquet_chunked_writer::parquet_chunked_writer */ parquet_chunked_writer::parquet_chunked_writer(chunked_parquet_writer_options const& options) { namespace io_detail = cudf::io::detail; auto sinks = make_datasinks(options.get_sink()); writer = std::make_unique<detail_parquet::writer>( std::move(sinks), options, io_detail::single_write_mode::NO, cudf::get_default_stream()); } /** * @copydoc cudf::io::parquet_chunked_writer::write */ parquet_chunked_writer& parquet_chunked_writer::write(table_view const& table, std::vector<partition_info> const& partitions) { CUDF_FUNC_RANGE(); writer->write(table, partitions); return *this; } /** * @copydoc cudf::io::parquet_chunked_writer::close */ std::unique_ptr<std::vector<uint8_t>> parquet_chunked_writer::close( std::vector<std::string> const& column_chunks_file_path) { CUDF_FUNC_RANGE(); return writer->close(column_chunks_file_path); } void parquet_reader_options::set_row_groups(std::vector<std::vector<size_type>> row_groups) { if ((!row_groups.empty()) and ((_skip_rows != 0) or _num_rows.has_value())) { CUDF_FAIL("row_groups can't be set along with skip_rows and num_rows"); } _row_groups = std::move(row_groups); } void parquet_reader_options::set_skip_rows(int64_t val) { CUDF_EXPECTS(val >= 0, "skip_rows cannot be negative"); CUDF_EXPECTS(_row_groups.empty(), "skip_rows can't be set along with a non-empty row_groups"); _skip_rows = val; } void parquet_reader_options::set_num_rows(size_type val) { CUDF_EXPECTS(val >= 0, "num_rows cannot be negative"); CUDF_EXPECTS(_row_groups.empty(), "num_rows can't be set along with a non-empty row_groups"); _num_rows = val; } void parquet_writer_options::set_partitions(std::vector<partition_info> partitions) { CUDF_EXPECTS(partitions.size() == _sink.num_sinks(), "Mismatch between number of sinks and number of partitions"); _partitions = std::move(partitions); } void parquet_writer_options::set_key_value_metadata( std::vector<std::map<std::string, std::string>> metadata) { CUDF_EXPECTS(metadata.size() == _sink.num_sinks(), "Mismatch between number of sinks and number of metadata maps"); _user_data = std::move(metadata); } void parquet_writer_options::set_column_chunks_file_paths(std::vector<std::string> file_paths) { CUDF_EXPECTS(file_paths.size() == _sink.num_sinks(), "Mismatch between number of sinks and number of chunk paths to set"); _column_chunks_file_paths = std::move(file_paths); } void parquet_writer_options::set_row_group_size_bytes(size_t size_bytes) { CUDF_EXPECTS( size_bytes >= 1024, "The maximum row group size cannot be smaller than the minimum page size, which is 1KB."); _row_group_size_bytes = size_bytes; } void parquet_writer_options::set_row_group_size_rows(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "The maximum row group row count must be a positive integer."); _row_group_size_rows = size_rows; } void parquet_writer_options::set_max_page_size_bytes(size_t 
size_bytes) { CUDF_EXPECTS(size_bytes >= 1024, "The maximum page size cannot be smaller than 1KB."); CUDF_EXPECTS(size_bytes <= static_cast<size_t>(std::numeric_limits<int32_t>::max()), "The maximum page size cannot exceed 2GB."); _max_page_size_bytes = size_bytes; } void parquet_writer_options::set_max_page_size_rows(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "The maximum page row count must be a positive integer."); _max_page_size_rows = size_rows; } void parquet_writer_options::set_column_index_truncate_length(int32_t size_bytes) { CUDF_EXPECTS(size_bytes >= 0, "Column index truncate length cannot be negative."); _column_index_truncate_length = size_bytes; } void parquet_writer_options::set_dictionary_policy(dictionary_policy policy) { _dictionary_policy = policy; } void parquet_writer_options::set_max_dictionary_size(size_t size_bytes) { CUDF_EXPECTS(size_bytes <= static_cast<size_t>(std::numeric_limits<int32_t>::max()), "The maximum dictionary size cannot exceed 2GB."); _max_dictionary_size = size_bytes; } void parquet_writer_options::set_max_page_fragment_size(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "Page fragment size must be a positive integer."); _max_page_fragment_size = size_rows; } parquet_writer_options_builder& parquet_writer_options_builder::partitions( std::vector<partition_info> partitions) { options.set_partitions(std::move(partitions)); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::key_value_metadata( std::vector<std::map<std::string, std::string>> metadata) { options.set_key_value_metadata(std::move(metadata)); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::column_chunks_file_paths( std::vector<std::string> file_paths) { options.set_column_chunks_file_paths(std::move(file_paths)); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::dictionary_policy( enum dictionary_policy val) { options.set_dictionary_policy(val); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::max_dictionary_size(size_t val) { options.set_max_dictionary_size(val); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::max_page_fragment_size( size_type val) { options.set_max_page_fragment_size(val); return *this; } parquet_writer_options_builder& parquet_writer_options_builder::write_v2_headers(bool enabled) { options.enable_write_v2_headers(enabled); return *this; } void chunked_parquet_writer_options::set_key_value_metadata( std::vector<std::map<std::string, std::string>> metadata) { CUDF_EXPECTS(metadata.size() == _sink.num_sinks(), "Mismatch between number of sinks and number of metadata maps"); _user_data = std::move(metadata); } void chunked_parquet_writer_options::set_row_group_size_bytes(size_t size_bytes) { CUDF_EXPECTS( size_bytes >= 1024, "The maximum row group size cannot be smaller than the minimum page size, which is 1KB."); _row_group_size_bytes = size_bytes; } void chunked_parquet_writer_options::set_row_group_size_rows(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "The maximum row group row count must be a positive integer."); _row_group_size_rows = size_rows; } void chunked_parquet_writer_options::set_max_page_size_bytes(size_t size_bytes) { CUDF_EXPECTS(size_bytes >= 1024, "The maximum page size cannot be smaller than 1KB."); CUDF_EXPECTS(size_bytes <= static_cast<size_t>(std::numeric_limits<int32_t>::max()), "The maximum page size cannot exceed 2GB."); _max_page_size_bytes = size_bytes; } void 
chunked_parquet_writer_options::set_max_page_size_rows(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "The maximum page row count must be a positive integer."); _max_page_size_rows = size_rows; } void chunked_parquet_writer_options::set_column_index_truncate_length(int32_t size_bytes) { CUDF_EXPECTS(size_bytes >= 0, "Column index truncate length cannot be negative."); _column_index_truncate_length = size_bytes; } void chunked_parquet_writer_options::set_dictionary_policy(dictionary_policy policy) { _dictionary_policy = policy; } void chunked_parquet_writer_options::set_max_dictionary_size(size_t size_bytes) { CUDF_EXPECTS(size_bytes <= static_cast<size_t>(std::numeric_limits<int32_t>::max()), "The maximum dictionary size cannot exceed 2GB."); _max_dictionary_size = size_bytes; } void chunked_parquet_writer_options::set_max_page_fragment_size(size_type size_rows) { CUDF_EXPECTS(size_rows > 0, "Page fragment size must be a positive integer."); _max_page_fragment_size = size_rows; } chunked_parquet_writer_options_builder& chunked_parquet_writer_options_builder::key_value_metadata( std::vector<std::map<std::string, std::string>> metadata) { options.set_key_value_metadata(std::move(metadata)); return *this; } chunked_parquet_writer_options_builder& chunked_parquet_writer_options_builder::dictionary_policy( enum dictionary_policy val) { options.set_dictionary_policy(val); return *this; } chunked_parquet_writer_options_builder& chunked_parquet_writer_options_builder::max_dictionary_size( size_t val) { options.set_max_dictionary_size(val); return *this; } chunked_parquet_writer_options_builder& chunked_parquet_writer_options_builder::write_v2_headers( bool enabled) { options.enable_write_v2_headers(enabled); return *this; } chunked_parquet_writer_options_builder& chunked_parquet_writer_options_builder::max_page_fragment_size(size_type val) { options.set_max_page_fragment_size(val); return *this; } } // namespace io } // namespace cudf
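The functions above are thin public entry points driven through the options-builder pattern. A minimal caller-side sketch (illustrative only, not part of the source tree; `path`, the chunk limit, and the output handling are assumptions):

// Hypothetical round trip through the public Parquet API implemented above.
#include <cudf/io/parquet.hpp>

#include <string>

void parquet_roundtrip_example(std::string const& path)
{
  // Read the whole file into a single table.
  auto const read_opts =
    cudf::io::parquet_reader_options::builder(cudf::io::source_info{path}).build();
  auto result = cudf::io::read_parquet(read_opts);

  // Write the table back out through the writer entry point.
  auto const write_opts =
    cudf::io::parquet_writer_options::builder(cudf::io::sink_info{path + ".copy"},
                                              result.tbl->view())
      .build();
  cudf::io::write_parquet(write_opts);

  // Or read incrementally, capping each returned chunk at ~256 MB.
  cudf::io::chunked_parquet_reader reader(256 * 1024 * 1024, read_opts);
  while (reader.has_next()) {
    auto chunk = reader.read_chunk();  // a table_with_metadata per chunk
  }
}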
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/gpuinflate.hpp
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/io/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <cstdint> namespace cudf { namespace io { /** * @brief Status of a compression/decompression operation. */ enum class compression_status : uint8_t { SUCCESS, ///< Successful, output is valid FAILURE, ///< Failed, output is invalid (e.g. input is unsupported in some way) SKIPPED, ///< Operation skipped (if conversion, uncompressed data can be used) OUTPUT_OVERFLOW, ///< Output buffer is too small; operation can succeed with larger output }; /** * @brief Descriptor of compression/decompression result. */ struct compression_result { uint64_t bytes_written; compression_status status; uint32_t reserved; }; enum class gzip_header_included { NO, YES }; /** * @brief The value used for padding a data buffer such that its size will be multiple of it. * * Padding is necessary for input/output buffers of several compression/decompression kernels * (inflate_kernel and nvcomp snappy). Such kernels operate on aligned data pointers, which require * padding to the buffers so that the pointers can shift along the address space to satisfy their * alignment requirement. * * In the meantime, it is not entirely clear why such padding is needed. We need to further * investigate and implement a better fix rather than just padding the buffer. * See https://github.com/rapidsai/cudf/issues/13605. */ constexpr std::size_t BUFFER_PADDING_MULTIPLE{8}; /** * @brief Interface for decompressing GZIP-compressed data * * Multiple, independent chunks of compressed data can be decompressed by using * separate input/output/status for each chunk. * * @param[in] inputs List of input buffers * @param[out] outputs List of output buffers * @param[out] results List of output status structures * @param[in] parse_hdr Whether or not to parse GZIP header * @param[in] stream CUDA stream to use */ void gpuinflate(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, gzip_header_included parse_hdr, rmm::cuda_stream_view stream); /** * @brief Interface for copying uncompressed byte blocks * * @param[in] inputs List of input buffers * @param[out] outputs List of output buffers * @param[in] stream CUDA stream to use */ void gpu_copy_uncompressed_blocks(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, rmm::cuda_stream_view stream); /** * @brief Interface for decompressing Snappy-compressed data * * Multiple, independent chunks of compressed data can be decompressed by using * separate input/output/status for each chunk. 
* * @param[in] inputs List of input buffers * @param[out] outputs List of output buffers * @param[out] results List of output status structures * @param[in] stream CUDA stream to use */ void gpu_unsnap(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, rmm::cuda_stream_view stream); /** * @brief Computes the size of temporary memory for Brotli decompression * * @param[in] max_num_inputs The maximum number of compressed input chunks * * @return The size in bytes of required temporary memory */ size_t get_gpu_debrotli_scratch_size(int max_num_inputs = 0); /** * @brief Interface for decompressing Brotli-compressed data * * Multiple, independent chunks of compressed data can be decompressed by using * separate input/output/status pairs for each chunk. * * @param[in] inputs List of input buffers * @param[out] outputs List of output buffers * @param[out] results List of output status structures * @param[in] scratch Temporary memory for intermediate work * @param[in] scratch_size Size in bytes of the temporary memory * @param[in] stream CUDA stream to use */ void gpu_debrotli(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, void* scratch, size_t scratch_size, rmm::cuda_stream_view stream); /** * @brief Interface for compressing data with Snappy * * Multiple, independent chunks of compressed data can be compressed by using * separate input/output/status for each chunk. * * @param[in] inputs List of input buffers * @param[out] outputs List of output buffers * @param[out] results List of output status structures * @param[in] stream CUDA stream to use */ void gpu_snap(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, rmm::cuda_stream_view stream); /** * @brief Aggregate results of compression into a single statistics object. * * @param inputs List of uncompressed input buffers * @param results List of compression results * @param stream CUDA stream to use * @return writer_compression_statistics */ [[nodiscard]] writer_compression_statistics collect_compression_statistics( device_span<device_span<uint8_t const> const> inputs, device_span<compression_result const> results, rmm::cuda_stream_view stream); } // namespace io } // namespace cudf
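Since this is an internal detail header, its batched API is driven by the format readers rather than by end users. A sketch of the expected call shape (an assumption-laden illustration, not code from the repository; it presumes the per-chunk descriptor spans were already staged in device memory):

// Hypothetical internal driver for gpuinflate(); assumes `d_inputs`/`d_outputs`
// (one entry per independent DEFLATE stream) already live on the device.
#include "gpuinflate.hpp"

#include <rmm/device_uvector.hpp>

void decompress_deflate_chunks(
  cudf::device_span<cudf::device_span<uint8_t const> const> d_inputs,
  cudf::device_span<cudf::device_span<uint8_t> const> d_outputs,
  rmm::cuda_stream_view stream)
{
  // One status/size record per chunk, filled in by the kernel.
  rmm::device_uvector<cudf::io::compression_result> results(d_inputs.size(), stream);
  cudf::io::gpuinflate(
    d_inputs, d_outputs, results, cudf::io::gzip_header_included::NO, stream);
  // Each results[i] now holds the compression_status and bytes_written for chunk i.
}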
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/nvcomp_adapter.hpp
/*
 * Copyright (c) 2022-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include "gpuinflate.hpp"

#include <io/utilities/config_utils.hpp>

#include <cudf/utilities/error.hpp>
#include <cudf/utilities/span.hpp>

#include <rmm/cuda_stream_view.hpp>

#include <optional>

namespace cudf::io::nvcomp {

enum class compression_type { SNAPPY, ZSTD, DEFLATE };

/**
 * @brief Set of parameters that impact whether the use of nvCOMP features is enabled.
 */
struct feature_status_parameters {
  int lib_major_version;
  int lib_minor_version;
  int lib_patch_version;
  bool are_all_integrations_enabled;
  bool are_stable_integrations_enabled;
  int compute_capability_major;

  feature_status_parameters();
  feature_status_parameters(
    int major, int minor, int patch, bool all_enabled, bool stable_enabled, int cc_major)
    : lib_major_version{major},
      lib_minor_version{minor},
      lib_patch_version{patch},
      are_all_integrations_enabled{all_enabled},
      are_stable_integrations_enabled{stable_enabled},
      compute_capability_major{cc_major}
  {
  }
};

/**
 * @brief Equality operator overload. Required to use `feature_status_parameters` as a map key.
 */
inline bool operator==(feature_status_parameters const& lhs, feature_status_parameters const& rhs)
{
  return lhs.lib_major_version == rhs.lib_major_version and
         lhs.lib_minor_version == rhs.lib_minor_version and
         lhs.lib_patch_version == rhs.lib_patch_version and
         lhs.are_all_integrations_enabled == rhs.are_all_integrations_enabled and
         lhs.are_stable_integrations_enabled == rhs.are_stable_integrations_enabled and
         lhs.compute_capability_major == rhs.compute_capability_major;
}

/**
 * @brief If a compression type is disabled through nvCOMP, returns the reason as a string.
 *
 * Result can depend on nvCOMP version and environment variables.
 *
 * @param compression Compression type
 * @param params Optional parameters to query status with different configurations
 * @returns Reason for the feature disablement, `std::nullopt` if the feature is enabled
 */
[[nodiscard]] std::optional<std::string> is_compression_disabled(
  compression_type compression, feature_status_parameters params = feature_status_parameters());

/**
 * @brief If a decompression type is disabled through nvCOMP, returns the reason as a string.
 *
 * Result can depend on nvCOMP version and environment variables.
 *
 * @param compression Compression type
 * @param params Optional parameters to query status with different configurations
 * @returns Reason for the feature disablement, `std::nullopt` if the feature is enabled
 */
[[nodiscard]] std::optional<std::string> is_decompression_disabled(
  compression_type compression, feature_status_parameters params = feature_status_parameters());

/**
 * @brief Device batch decompression of given type.
 *
 * @param[in] compression Compression type
 * @param[in] inputs List of input buffers
 * @param[out] outputs List of output buffers
 * @param[out] results List of output status structures
 * @param[in] max_uncomp_chunk_size maximum size of uncompressed chunk
 * @param[in] max_total_uncomp_size maximum total size of uncompressed data
 * @param[in] stream CUDA stream to use
 */
void batched_decompress(compression_type compression,
                        device_span<device_span<uint8_t const> const> inputs,
                        device_span<device_span<uint8_t> const> outputs,
                        device_span<compression_result> results,
                        size_t max_uncomp_chunk_size,
                        size_t max_total_uncomp_size,
                        rmm::cuda_stream_view stream);

/**
 * @brief Gets the maximum size any chunk could compress to in the batch.
 *
 * @param compression Compression type
 * @param max_uncomp_chunk_size Size of the largest uncompressed chunk in the batch
 */
[[nodiscard]] size_t compress_max_output_chunk_size(compression_type compression,
                                                    uint32_t max_uncomp_chunk_size);

/**
 * @brief Gets input alignment requirements for the given compression type.
 *
 * @param compression Compression type
 * @returns required alignment, in bits
 */
[[nodiscard]] size_t compress_input_alignment_bits(compression_type compression);

/**
 * @brief Gets output alignment requirements for the given compression type.
 *
 * @param compression Compression type
 * @returns required alignment, in bits
 */
[[nodiscard]] size_t compress_output_alignment_bits(compression_type compression);

/**
 * @brief Maximum size of uncompressed chunks that can be compressed with nvCOMP.
 *
 * @param compression Compression type
 * @returns maximum chunk size
 */
[[nodiscard]] std::optional<size_t> compress_max_allowed_chunk_size(compression_type compression);

/**
 * @brief Device batch compression of given type.
 *
 * @param[in] compression Compression type
 * @param[in] inputs List of input buffers
 * @param[out] outputs List of output buffers
 * @param[out] results List of output status structures
 * @param[in] stream CUDA stream to use
 */
void batched_compress(compression_type compression,
                      device_span<device_span<uint8_t const> const> inputs,
                      device_span<device_span<uint8_t> const> outputs,
                      device_span<compression_result> results,
                      rmm::cuda_stream_view stream);

}  // namespace cudf::io::nvcomp
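A sketch of the intended call pattern (illustrative, not from the repository): query `is_compression_disabled` first, and only then dispatch the batched API, falling back to another code path when a reason string is returned.

// Hypothetical helper: prefer nvCOMP Snappy, report false if it is disabled.
#include "nvcomp_adapter.hpp"

bool try_nvcomp_snappy_compress(
  cudf::device_span<cudf::device_span<uint8_t const> const> inputs,
  cudf::device_span<cudf::device_span<uint8_t> const> outputs,
  cudf::device_span<cudf::io::compression_result> results,
  rmm::cuda_stream_view stream)
{
  namespace nvcomp = cudf::io::nvcomp;
  if (auto const reason = nvcomp::is_compression_disabled(nvcomp::compression_type::SNAPPY)) {
    // `*reason` names the cause (library version, integration env vars, ...);
    // the caller would pick a non-nvCOMP code path here.
    return false;
  }
  nvcomp::batched_compress(
    nvcomp::compression_type::SNAPPY, inputs, outputs, results, stream);
  return true;
}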
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/unbz2.hpp
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * * bzip2 license information is available at * https://spdx.org/licenses/bzip2-1.0.6.html * https://github.com/asimonov-im/bzip2/blob/master/LICENSE * original source code available at * http://www.sourceware.org/bzip2/ */ /*-- Copyright (C) 1996-2002 Julian R Seward. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 3. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 4. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Julian Seward, Cambridge, UK. [email protected] bzip2/libbzip2 version 1.0 of 21 March 2000 This program is based on (at least) the work of: Mike Burrows David Wheeler Peter Fenwick Alistair Moffat Radford Neal Ian H. Witten Robert Sedgewick Jon L. Bentley For more information on these sources, see the manual. --*/ #pragma once namespace cudf { namespace io { // If BZ_OUTBUFF_FULL is returned and block_start is non-NULL, dstlen will be updated to point to // the end of the last valid block, and block_start will contain the offset in bits of the beginning // of the block, so it can be passed in to resume decoding later on. #define BZ_OK 0 #define BZ_RUN_OK 1 #define BZ_FLUSH_OK 2 #define BZ_FINISH_OK 3 #define BZ_STREAM_END 4 #define BZ_SEQUENCE_ERROR (-1) #define BZ_PARAM_ERROR (-2) #define BZ_MEM_ERROR (-3) #define BZ_DATA_ERROR (-4) #define BZ_DATA_ERROR_MAGIC (-5) #define BZ_IO_ERROR (-6) #define BZ_UNEXPECTED_EOF (-7) #define BZ_OUTBUFF_FULL (-8) int32_t cpu_bz2_uncompress(uint8_t const* input, size_t inlen, uint8_t* dst, size_t* dstlen, uint64_t* block_start = nullptr); } // namespace io } // namespace cudf
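A minimal sketch of the resume-on-overflow contract described in the comment above (hypothetical wrapper, not part of the file; buffer sizing is an assumption):

// Hypothetical wrapper around cpu_bz2_uncompress() exercising BZ_OUTBUFF_FULL.
#include <cstdint>
#include <vector>

int bz2_uncompress_prefix(uint8_t const* src, size_t src_len, std::vector<uint8_t>& out)
{
  size_t dst_len      = out.size();
  uint64_t block_start = 0;  // receives the bit offset of the last complete block
  int const ret = cudf::io::cpu_bz2_uncompress(src, src_len, out.data(), &dst_len, &block_start);
  if (ret == BZ_OUTBUFF_FULL) {
    // dst_len was updated to the end of the last fully decoded block; decoding
    // could later resume from `block_start` once more output space is available.
    out.resize(dst_len);
  }
  return ret;
}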
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/io_uncomp.hpp
/*
 * Copyright (c) 2018-2022, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cudf/io/types.hpp>
#include <cudf/utilities/span.hpp>

#include <rmm/cuda_stream_view.hpp>

#include <memory>
#include <string>
#include <vector>

using cudf::host_span;

namespace cudf {
namespace io {

/**
 * @brief Decompresses a system memory buffer.
 *
 * @param compression Type of compression of the input data
 * @param src Compressed host buffer
 *
 * @return Vector containing the decompressed output
 */
std::vector<uint8_t> decompress(compression_type compression, host_span<uint8_t const> src);

/**
 * @brief Decompresses a system memory buffer into a caller-provided buffer.
 *
 * @param compression Type of compression of the input data
 * @param src Compressed host buffer
 * @param dst Destination host buffer for the decompressed output
 * @param stream CUDA stream to use
 *
 * @return Size of the decompressed output, in bytes
 */
size_t decompress(compression_type compression,
                  host_span<uint8_t const> src,
                  host_span<uint8_t> dst,
                  rmm::cuda_stream_view stream);

/**
 * @brief GZIP header flags
 * See https://tools.ietf.org/html/rfc1952
 */
namespace GZIPHeaderFlag {
constexpr uint8_t ftext    = 0x01;  // ASCII text hint
constexpr uint8_t fhcrc    = 0x02;  // Header CRC present
constexpr uint8_t fextra   = 0x04;  // Extra fields present
constexpr uint8_t fname    = 0x08;  // Original file name present
constexpr uint8_t fcomment = 0x10;  // Comment present
};  // namespace GZIPHeaderFlag

}  // namespace io
}  // namespace cudf
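A short sketch of the whole-buffer overload (hypothetical caller, not part of the file):

// Hypothetical caller of the whole-buffer overload declared above.
#include <vector>

std::vector<uint8_t> inflate_gzip_host_buffer(std::vector<uint8_t> const& compressed)
{
  // compression_type::GZIP comes from cudf/io/types.hpp.
  return cudf::io::decompress(
    cudf::io::compression_type::GZIP,
    cudf::host_span<uint8_t const>{compressed.data(), compressed.size()});
}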
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/gpuinflate.cu
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** @file gpuinflate.cu Derived from zlib's contrib/puff.c, original copyright notice below */ /* Copyright (C) 2002-2013 Mark Adler, all rights reserved version 2.3, 21 Jan 2013 This software is provided 'as-is', without any express or implied warranty. In no event will the author be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. Mark Adler [email protected] */ #include "gpuinflate.hpp" #include "io_uncomp.hpp" #include <io/utilities/block_utils.cuh> #include <rmm/cuda_stream_view.hpp> namespace cudf { namespace io { constexpr int max_bits = 15; // maximum bits in a code constexpr int max_l_codes = 286; // maximum number of literal/length codes constexpr int max_d_codes = 30; // maximum number of distance codes constexpr int fix_l_codes = 288; // number of fixed literal/length codes constexpr int log2_len_lut = 10; constexpr int log2_dist_lut = 8; /** * @brief Intermediate arrays for building huffman tables */ struct scratch_arr { int16_t lengths[max_l_codes + max_d_codes]; ///< descriptor code lengths int16_t offs[max_bits + 1]; ///< offset in symbol table for each length (scratch) }; /** * @brief Huffman LUTs for length and distance codes */ struct lut_arr { int32_t lenlut[1 << log2_len_lut]; ///< LUT for length decoding int32_t distlut[1 << log2_dist_lut]; ///< LUT for fast distance decoding }; /// 4 batches of 32 symbols constexpr int log2_batch_count = 2; // 1..5 constexpr int log2_batch_size = 5; constexpr int batch_count = (1 << log2_batch_count); constexpr int batch_size = (1 << log2_batch_size); /** * @brief Inter-warp communication queue */ struct xwarp_s { int32_t batch_len[batch_count]; //< Length of each batch - <0:end, 0:not ready, >0:symbol count union { uint32_t symqueue[batch_count * batch_size]; uint8_t symqueue8[batch_count * batch_size * 4]; } u; }; #define ENABLE_PREFETCH 1 #if ENABLE_PREFETCH constexpr int log2_prefetch_size = 9; // Must be at least LOG2_BATCH_SIZE+3 constexpr int prefetch_size = (1 << log2_prefetch_size); /// @brief Prefetcher state struct prefetch_queue_s { uint8_t const* cur_p; ///< Prefetch location int run; ///< prefetcher will exit when run=0 uint8_t pref_data[prefetch_size]; }; template <typename T> inline __device__ volatile uint32_t* prefetch_addr32(volatile prefetch_queue_s& q, T* ptr) { return reinterpret_cast<volatile 
uint32_t*>(&q.pref_data[(prefetch_size - 4) & (size_t)(ptr)]); } #endif // ENABLE_PREFETCH /** * @brief Inflate decompressor state */ struct inflate_state_s { // output state uint8_t* out; ///< output buffer uint8_t* outbase; ///< start of output buffer uint8_t* outend; ///< end of output buffer // Input state uint8_t const* cur; ///< input buffer uint8_t const* end; ///< end of input buffer uint2 bitbuf; ///< bit buffer (64-bit) uint32_t bitpos; ///< position in bit buffer int32_t err; ///< Error status int btype; ///< current block type int blast; ///< last block uint32_t stored_blk_len; ///< length of stored (uncompressed) block uint16_t first_slow_len; ///< first code not in fast LUT uint16_t index_slow_len; uint16_t first_slow_dist; uint16_t index_slow_dist; volatile xwarp_s x; #if ENABLE_PREFETCH volatile prefetch_queue_s pref; #endif int16_t lencnt[max_bits + 1]; int16_t lensym[fix_l_codes]; // Assumes fix_l_codes >= max_l_codes int16_t distcnt[max_bits + 1]; int16_t distsym[max_d_codes]; union { scratch_arr scratch; lut_arr lut; } u; }; inline __device__ unsigned int bfe(unsigned int source, unsigned int bit_start, unsigned int num_bits) { unsigned int bits; asm("bfe.u32 %0, %1, %2, %3;" : "=r"(bits) : "r"(source), "r"(bit_start), "r"(num_bits)); return bits; }; inline __device__ uint32_t showbits(inflate_state_s* s, uint32_t n) { uint32_t next32 = __funnelshift_rc(s->bitbuf.x, s->bitbuf.y, s->bitpos); return (next32 & ((1 << n) - 1)); } inline __device__ uint32_t nextbits32(inflate_state_s* s) { return __funnelshift_rc(s->bitbuf.x, s->bitbuf.y, s->bitpos); } inline __device__ void skipbits(inflate_state_s* s, uint32_t n) { uint32_t bitpos = s->bitpos + n; if (bitpos >= 32) { auto cur = s->cur + 8; s->bitbuf.x = s->bitbuf.y; s->bitbuf.y = (cur < s->end) ? *reinterpret_cast<uint32_t const*>(cur) : 0; s->cur = cur - 4; bitpos &= 0x1f; } s->bitpos = bitpos; } // TODO: If we require 4-byte alignment of input bitstream & length (padded), reading bits would // become quite a bit faster __device__ uint32_t getbits(inflate_state_s* s, uint32_t n) { uint32_t v = showbits(s, n); skipbits(s, n); return v; } /** * @brief Decode a code from the stream s using huffman table {symbols,counts}. * Return the symbol or a negative value if there is an error. * If all of the lengths are zero, i.e. an empty code, or if the code is * incomplete and an invalid code is received, then -10 is returned after * reading max_bits bits. * * Format notes: * * - The codes as stored in the compressed data are bit-reversed relative to * a simple integer ordering of codes of the same lengths. Hence below the * bits are pulled from the compressed data one at a time and used to * build the code value reversed from what is in the stream in order to * permit simple integer comparisons for decoding. A table-based decoding * scheme (as used in zlib) does not need to do this reversal. * * - The first code for the shortest length is all zeros. Subsequent codes of * the same length are simply integer increments of the previous code. When * moving up a length, a zero bit is appended to the code. For a complete * code, the last code of the longest length will be all ones. * * - Incomplete codes are handled by this decoder, since they are permitted * in the deflate format. See the format notes for fixed() and dynamic(). 
*/ __device__ int decode(inflate_state_s* s, int16_t const* counts, int16_t const* symbols) { unsigned int len; // current number of bits in code unsigned int code; // len bits being decoded unsigned int first; // first code of length len unsigned int count; // number of codes of length len uint32_t next32r = __brev(nextbits32(s)); first = 0; for (len = 1; len <= max_bits; len++) { code = (next32r >> (32 - len)) - first; count = counts[len]; if (code < count) // if length len, return symbol { skipbits(s, len); return symbols[code]; } symbols += count; // else update for next length first += count; first <<= 1; } return -10; // ran out of codes } /** * @brief Given the list of code lengths length[0..n-1] representing a canonical * Huffman code for n symbols, construct the tables required to decode those * codes. Those tables are the number of codes of each length, and the symbols * sorted by length, retaining their original order within each length. The * return value is zero for a complete code set, negative for an over- * subscribed code set, and positive for an incomplete code set. The tables * can be used if the return value is zero or positive, but they cannot be used * if the return value is negative. If the return value is zero, it is not * possible for decode() using that table to return an error--any stream of * enough bits will resolve to a symbol. If the return value is positive, then * it is possible for decode() using that table to return an error for received * codes past the end of the incomplete lengths. * * Not used by decode(), but used for error checking, count[0] is the number * of the n symbols not in the code. So n - count[0] is the number of * codes. This is useful for checking for incomplete codes that have more than * one symbol, which is an error in a dynamic block. * * Assumption: for all i in 0..n-1, 0 <= length[i] <= max_bits * This is assured by the construction of the length arrays in dynamic() and * fixed() and is not verified by construct(). * * Format notes: * * - Permitted and expected examples of incomplete codes are one of the fixed * codes and any code with a single symbol which in deflate is coded as one * bit instead of zero bits. See the format notes for fixed() and dynamic(). * * - Within a given code length, the symbols are kept in ascending order for * the code bits definition. */ __device__ int construct( inflate_state_s* s, int16_t* counts, int16_t* symbols, int16_t const* length, int n) { int symbol; // current symbol when stepping through length[] int len; // current length when stepping through counts[] int left; // number of possible codes left of current length int16_t* offs = s->u.scratch.offs; // count number of codes of each length for (len = 0; len <= max_bits; len++) counts[len] = 0; for (symbol = 0; symbol < n; symbol++) (counts[length[symbol]])++; // assumes lengths are within bounds if (counts[0] == n) // no codes! 
return 0; // complete, but decode() will fail // check for an over-subscribed or incomplete set of lengths left = 1; // one possible code of zero length for (len = 1; len <= max_bits; len++) { left <<= 1; // one more bit, double codes left left -= counts[len]; // deduct count from possible codes if (left < 0) return left; // over-subscribed--return negative } // left > 0 means incomplete // generate offsets into symbol table for each length for sorting offs[1] = 0; for (len = 1; len < max_bits; len++) offs[len + 1] = offs[len] + counts[len]; // put symbols in table sorted by length, by symbol order within each length for (symbol = 0; symbol < n; symbol++) if (length[symbol] != 0) symbols[offs[length[symbol]]++] = symbol; // return zero for complete set, positive for incomplete set return left; } /// permutation of code length codes static const __device__ __constant__ uint8_t g_code_order[19 + 1] = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15, 0xff}; /// Dynamic block (custom huffman tables) __device__ int init_dynamic(inflate_state_s* s) { int nlen, ndist, ncode; /* number of lengths in descriptor */ int index; /* index of lengths[] */ int err; /* construct() return value */ int16_t* lengths = s->u.scratch.lengths; // get number of lengths in each table, check lengths nlen = getbits(s, 5) + 257; ndist = getbits(s, 5) + 1; ncode = getbits(s, 4) + 4; if (nlen > max_l_codes || ndist > max_d_codes) { return -3; // bad counts } // read code length code lengths (really), missing lengths are zero for (index = 0; index < ncode; index++) lengths[g_code_order[index]] = getbits(s, 3); for (; index < 19; index++) lengths[g_code_order[index]] = 0; // build huffman table for code lengths codes (use lencode temporarily) err = construct(s, s->lencnt, s->lensym, lengths, 19); if (err != 0) // require complete code set here return -4; // read length/literal and distance code length tables index = 0; while (index < nlen + ndist) { int symbol = decode(s, s->lencnt, s->lensym); if (symbol < 0) return symbol; // invalid symbol if (symbol < 16) // length in 0..15 lengths[index++] = symbol; else { // repeat instruction int len = 0; // last length to repeat, assume repeating zeros if (symbol == 16) { // repeat last length 3..6 times if (index == 0) return -5; // no last length! len = lengths[index - 1]; // last length symbol = 3 + getbits(s, 2); } else if (symbol == 17) // repeat zero 3..10 times symbol = 3 + getbits(s, 3); else // == 18, repeat zero 11..138 times symbol = 11 + getbits(s, 7); if (index + symbol > nlen + ndist) return -6; // too many lengths! while (symbol--) // repeat last or zero symbol times lengths[index++] = len; } } // check for end-of-block code -- there better be one! if (lengths[256] == 0) return -9; // build huffman table for literal/length codes err = construct(s, s->lencnt, s->lensym, lengths, nlen); if (err && (err < 0 || nlen != s->lencnt[0] + s->lencnt[1])) return -7; // incomplete code ok only for single length 1 code // build huffman table for distance codes err = construct(s, s->distcnt, s->distsym, &lengths[nlen], ndist); if (err && (err < 0 || ndist != s->distcnt[0] + s->distcnt[1])) return -8; // incomplete code ok only for single length 1 code return 0; } /** * @brief Initializes a fixed codes block. * * Format notes: * * - This block type can be useful for compressing small amounts of data for * which the size of the code descriptions in a dynamic block exceeds the * benefit of custom codes for that block. 
For fixed codes, no bits are * spent on code descriptions. Instead the code lengths for literal/length * codes and distance codes are fixed. The specific lengths for each symbol * can be seen in the "for" loops below. * * - The literal/length code is complete, but has two symbols that are invalid * and should result in an error if received. This cannot be implemented * simply as an incomplete code since those two symbols are in the "middle" * of the code. They are eight bits long and the longest literal/length\ * code is nine bits. Therefore the code must be constructed with those * symbols, and the invalid symbols must be detected after decoding. * * - The fixed distance codes also have two invalid symbols that should result * in an error if received. Since all of the distance codes are the same * length, this can be implemented as an incomplete code. Then the invalid * codes are detected while decoding. */ __device__ int init_fixed(inflate_state_s* s) { int16_t* lengths = s->u.scratch.lengths; int symbol; // literal/length table for (symbol = 0; symbol < 144; symbol++) lengths[symbol] = 8; for (; symbol < 256; symbol++) lengths[symbol] = 9; for (; symbol < 280; symbol++) lengths[symbol] = 7; for (; symbol < fix_l_codes; symbol++) lengths[symbol] = 8; construct(s, s->lencnt, s->lensym, lengths, fix_l_codes); // distance table for (symbol = 0; symbol < max_d_codes; symbol++) lengths[symbol] = 5; // build huffman table for distance codes construct(s, s->distcnt, s->distsym, lengths, max_d_codes); return 0; } /** * @brief Decode literal/length and distance codes until an end-of-block code. * * Format notes: * * - Compressed data that is after the block type if fixed or after the code * description if dynamic is a combination of literals and length/distance * pairs terminated by and end-of-block code. Literals are simply Huffman * coded bytes. A length/distance pair is a coded length followed by a * coded distance to represent a string that occurs earlier in the * uncompressed data that occurs again at the current location. * * - Literals, lengths, and the end-of-block code are combined into a single * code of up to 286 symbols. They are 256 literals (0..255), 29 length * symbols (257..285), and the end-of-block symbol (256). * * - There are 256 possible lengths (3..258), and so 29 symbols are not enough * to represent all of those. Lengths 3..10 and 258 are in fact represented * by just a length symbol. Lengths 11..257 are represented as a symbol and * some number of extra bits that are added as an integer to the base length * of the length symbol. The number of extra bits is determined by the base * length symbol. These are in the static arrays below, lens[] for the base * lengths and lext[] for the corresponding number of extra bits. * * - The reason that 258 gets its own symbol is that the longest length is used * often in highly redundant files. Note that 258 can also be coded as the * base value 227 plus the maximum extra value of 31. While a good deflate * should never do this, it is not an error, and should be decoded properly. * * - If a length is decoded, including its extra bits if any, then it is * followed a distance code. There are up to 30 distance symbols. Again * there are many more possible distances (1..32768), so extra bits are added * to a base value represented by the symbol. The distances 1..4 get their * own symbol, but the rest require extra bits. The base distances and * corresponding number of extra bits are below in the static arrays dist[] * and dext[]. 
* * - Literal bytes are simply written to the output. A length/distance pair is * an instruction to copy previously uncompressed bytes to the output. The * copy is from distance bytes back in the output stream, copying for length * bytes. * * - Distances pointing before the beginning of the output data are not * permitted. * * - Overlapped copies, where the length is greater than the distance, are * allowed and common. For example, a distance of one and a length of 258 * simply copies the last byte 258 times. A distance of four and a length of * twelve copies the last four bytes three times. A simple forward copy * ignoring whether the length is greater than the distance or not implements * this correctly. You should not use memcpy() since its behavior is not * defined for overlapped arrays. You should not use memmove() or bcopy() * since though their behavior -is- defined for overlapping arrays, it is * defined to do the wrong thing in this case. */ /// permutation of code length codes static const __device__ __constant__ uint16_t g_lens[29] = { // Size base for length codes 257..285 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258}; static const __device__ __constant__ uint16_t g_lext[29] = { // Extra bits for length codes 257..285 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0}; static const __device__ __constant__ uint16_t g_dists[30] = { // Offset base for distance codes 0..29 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577}; static const __device__ __constant__ uint16_t g_dext[30] = { // Extra bits for distance codes 0..29 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13}; /// @brief Thread 0 only: decode bitstreams and output symbols into the symbol queue __device__ void decode_symbols(inflate_state_s* s) { uint32_t bitpos = s->bitpos; uint2 bitbuf = s->bitbuf; auto cur = s->cur; auto end = s->end; int32_t batch = 0; int32_t sym, batch_len; do { volatile uint32_t* b = &s->x.u.symqueue[batch * batch_size]; // Wait for the next batch entry to be empty #if ENABLE_PREFETCH // Wait for prefetcher to fetch a worst-case of 48 bits per symbol while ((*(volatile int32_t*)&s->pref.cur_p - (int32_t)(size_t)cur < batch_size * 6) || (s->x.batch_len[batch] != 0)) {} #else while (s->x.batch_len[batch] != 0) {} #endif batch_len = 0; #if ENABLE_PREFETCH if (cur + (bitpos >> 3) >= end) { s->err = 1; break; } #endif // Inner loop decoding symbols do { uint32_t next32 = __funnelshift_rc(bitbuf.x, bitbuf.y, bitpos); // nextbits32(s); uint32_t len; sym = s->u.lut.lenlut[next32 & ((1 << log2_len_lut) - 1)]; if ((uint32_t)sym < (uint32_t)(0x100 << 5)) { // We can lookup a second symbol if this was a short literal len = sym & 0x1f; sym >>= 5; b[batch_len++] = sym; next32 >>= len; bitpos += len; sym = s->u.lut.lenlut[next32 & ((1 << log2_len_lut) - 1)]; } if (sym > 0) // short symbol { len = sym & 0x1f; sym = ((sym >> 5) & 0x3ff) + ((next32 >> (sym >> 24)) & ((sym >> 16) & 0x1f)); } else { // Slow length path uint32_t next32r = __brev(next32); int16_t const* symbols = &s->lensym[s->index_slow_len]; unsigned int first = s->first_slow_len; int lext; #pragma unroll 1 for (len = log2_len_lut + 1; len <= max_bits; len++) { unsigned int code = (next32r >> (32 - len)) - first; unsigned int count = s->lencnt[len]; if (code < count) // if length len, return symbol { sym = 
symbols[code]; break; } symbols += count; // else update for next length first += count; first <<= 1; } if (len > max_bits) { s->err = -10; sym = 256; len = 0; } if (sym > 256) { sym -= 257; lext = g_lext[sym]; sym = 256 + g_lens[sym] + bfe(next32, len, lext); len += lext; } } if (sym > 256) { int dist, dext; // skipbits(s, len) inlined - no limit check bitpos += len; if (bitpos >= 32) { bitbuf.x = bitbuf.y; #if ENABLE_PREFETCH bitbuf.y = *prefetch_addr32(s->pref, cur + 8); cur += 4; #else cur += 8; bitbuf.y = (cur < end) ? *(uint32_t const*)cur : 0; cur -= 4; #endif bitpos &= 0x1f; } // get distance next32 = __funnelshift_rc(bitbuf.x, bitbuf.y, bitpos); // nextbits32(s); dist = s->u.lut.distlut[next32 & ((1 << log2_dist_lut) - 1)]; if (dist > 0) { len = dist & 0x1f; dext = bfe(dist, 20, 5); dist = bfe(dist, 5, 15); sym |= (dist + bfe(next32, len, dext)) << 16; len += dext; } else { uint32_t next32r = __brev(next32); int16_t const* symbols = &s->distsym[s->index_slow_dist]; unsigned int first = s->first_slow_dist; #pragma unroll 1 for (len = log2_dist_lut + 1; len <= max_bits; len++) { unsigned int code = (next32r >> (32 - len)) - first; unsigned int count = s->distcnt[len]; if (code < count) // if length len, return symbol { dist = symbols[code]; break; } symbols += count; // else update for next length first += count; first <<= 1; } if (len > max_bits) { s->err = -10; sym = 256; len = 0; } else { dext = g_dext[dist]; sym |= (g_dists[dist] + bfe(next32, len, dext)) << 16; len += dext; } } } // skipbits(s, len) inlined with added error check for reading past the end of the input // buffer bitpos += len; if (bitpos >= 32) { bitbuf.x = bitbuf.y; #if ENABLE_PREFETCH bitbuf.y = *prefetch_addr32(s->pref, cur + 8); cur += 4; #else cur += 8; if (cur < end) { bitbuf.y = *(uint32_t const*)cur; cur -= 4; } else { bitbuf.y = 0; cur -= 4; if (cur > end) { s->err = 1; sym = 256; } } #endif bitpos &= 0x1f; } if (sym == 256) break; b[batch_len++] = sym; } while (batch_len < batch_size - 1); s->x.batch_len[batch] = batch_len; #if ENABLE_PREFETCH ((volatile inflate_state_s*)s)->cur = cur; #endif if (batch_len != 0) batch = (batch + 1) & (batch_count - 1); } while (sym != 256); while (s->x.batch_len[batch] != 0) {} s->x.batch_len[batch] = -1; s->bitbuf = bitbuf; s->bitpos = bitpos; #if !ENABLE_PREFETCH s->cur = cur; #endif } /** * @brief Build lookup tables for faster decode * LUT format is symbols*16+length */ __device__ void init_length_lut(inflate_state_s* s, int t) { int32_t* lut = s->u.lut.lenlut; for (uint32_t bits = t; bits < (1 << log2_len_lut); bits += blockDim.x) { int16_t const* cnt = s->lencnt; int16_t const* symbols = s->lensym; int sym = -10 << 5; unsigned int first = 0; unsigned int rbits = __brev(bits) >> (32 - log2_len_lut); for (unsigned int len = 1; len <= log2_len_lut; len++) { unsigned int code = (rbits >> (log2_len_lut - len)) - first; unsigned int count = cnt[len]; if (code < count) { sym = symbols[code]; if (sym > 256) { int lext = g_lext[sym - 257]; sym = (256 + g_lens[sym - 257]) | (((1 << lext) - 1) << (16 - 5)) | (len << (24 - 5)); len += lext; } sym = (sym << 5) | len; break; } symbols += count; // else update for next length first += count; first <<= 1; } lut[bits] = sym; } if (!t) { unsigned int first = 0; unsigned int index = 0; int16_t const* cnt = s->lencnt; for (unsigned int len = 1; len <= log2_len_lut; len++) { unsigned int count = cnt[len]; index += count; first += count; first <<= 1; } s->first_slow_len = first; s->index_slow_len = index; } } /** * @brief Build lookup 
tables for faster decode of distance symbol * LUT format is symbols*16+length */ __device__ void init_distance_lut(inflate_state_s* s, int t) { int32_t* lut = s->u.lut.distlut; for (uint32_t bits = t; bits < (1 << log2_dist_lut); bits += blockDim.x) { int16_t const* cnt = s->distcnt; int16_t const* symbols = s->distsym; int sym = 0; unsigned int first = 0; unsigned int rbits = __brev(bits) >> (32 - log2_dist_lut); for (unsigned int len = 1; len <= log2_dist_lut; len++) { unsigned int code = (rbits >> (log2_dist_lut - len)) - first; unsigned int count = cnt[len]; if (code < count) { int dist = symbols[code]; int dext = g_dext[dist]; sym = g_dists[dist] | (dext << 15); sym = (sym << 5) | len; break; } symbols += count; // else update for next length first += count; first <<= 1; } lut[bits] = sym; } if (!t) { unsigned int first = 0; unsigned int index = 0; int16_t const* cnt = s->distcnt; for (unsigned int len = 1; len <= log2_dist_lut; len++) { unsigned int count = cnt[len]; index += count; first += count; first <<= 1; } s->first_slow_dist = first; s->index_slow_dist = index; } } /// @brief WARP1: process symbols and output uncompressed stream __device__ void process_symbols(inflate_state_s* s, int t) { uint8_t* out = s->out; uint8_t const* outend = s->outend; uint8_t const* outbase = s->outbase; int batch = 0; do { volatile uint32_t* b = &s->x.u.symqueue[batch * batch_size]; int batch_len = 0; if (t == 0) { while ((batch_len = s->x.batch_len[batch]) == 0) {} } batch_len = shuffle(batch_len); if (batch_len < 0) { break; } auto const symt = (t < batch_len) ? b[t] : 256; auto const lit_mask = ballot(symt >= 256); auto pos = min((__ffs(lit_mask) - 1) & 0xff, 32); if (t == 0) { s->x.batch_len[batch] = 0; } if (t < pos && out + t < outend) { out[t] = symt; } out += pos; batch_len -= pos; while (batch_len > 0) { int dist, len, symbol; // Process a non-literal symbol symbol = shuffle(symt, pos); len = max((symbol & 0xffff) - 256, 0); // max should be unnecessary, but just in case dist = symbol >> 16; for (int i = t; i < len; i += 32) { uint8_t const* src = out + ((i >= dist) ? (i % dist) : i) - dist; uint8_t b = (src < outbase) ? 0 : *src; if (out + i < outend) { out[i] = b; } } out += len; pos++; batch_len--; // Process subsequent literals, if any if (!((lit_mask >> pos) & 1)) { len = min((__ffs(lit_mask >> pos) - 1) & 0xff, batch_len); symbol = shuffle(symt, (pos + t) & 0x1f); if (t < len && out + t < outend) { out[t] = symbol; } out += len; pos += len; batch_len -= len; } } batch = (batch + 1) & (batch_count - 1); } while (true); if (t == 0) { s->out = out; } } /** * @brief Initializes a stored block. * * Format notes: * * - After the two-bit stored block type (00), the stored block length and * stored bytes are byte-aligned for fast copying. Therefore any leftover * bits in the byte that has the last bit of the type, as many as seven, are * discarded. The value of the discarded bits are not defined and should not * be checked against any expectation. * * - The second inverted copy of the stored block length does not have to be * checked, but it's probably a good idea to do so anyway. * * - A stored block can have zero length. This is sometimes used to byte-align * subsets of the compressed data for random access or partial recovery. 
*/ __device__ int init_stored(inflate_state_s* s) { uint32_t len, nlen; // length of stored block // Byte align if (s->bitpos & 7) { skipbits(s, 8 - (s->bitpos & 7)); } if (s->cur + (s->bitpos >> 3) >= s->end) { return 2; // Not enough input } // get length and check against its one's complement len = getbits(s, 16); nlen = getbits(s, 16); if (len != (nlen ^ 0xffff)) { return -2; // didn't match complement! } if (s->cur + (s->bitpos >> 3) + len > s->end) { return 2; // Not enough input } s->stored_blk_len = len; // done with a valid stored block return 0; } /// Copy bytes from stored block to destination __device__ void copy_stored(inflate_state_s* s, int t) { auto len = s->stored_blk_len; auto cur = s->cur + s->bitpos / 8; auto out = s->out; auto outend = s->outend; auto const slow_bytes = min(len, (int)((16 - reinterpret_cast<size_t>(out)) % 16)); // Slow copy until output is 16B aligned if (slow_bytes) { for (int i = t; i < slow_bytes; i += blockDim.x) { if (out + i < outend) { out[i] = cur[i]; // Input range has already been validated in init_stored() } } cur += slow_bytes; out += slow_bytes; len -= slow_bytes; } auto fast_bytes = len; if (out < outend) { fast_bytes = (int)min((size_t)fast_bytes, (outend - out)); } fast_bytes &= ~0xf; auto bitpos = ((int)((size_t)cur % 4)) * 8; auto cur4 = cur - (bitpos / 8); if (out < outend) { // Fast copy 16 bytes at a time for (int i = t * 16; i < fast_bytes; i += blockDim.x * 16) { uint4 u; u.x = *reinterpret_cast<uint32_t const*>(cur4 + i + 0 * 4); u.y = *reinterpret_cast<uint32_t const*>(cur4 + i + 1 * 4); u.z = *reinterpret_cast<uint32_t const*>(cur4 + i + 2 * 4); u.w = *reinterpret_cast<uint32_t const*>(cur4 + i + 3 * 4); if (bitpos != 0) { uint32_t v = (bitpos != 0) ? *reinterpret_cast<uint32_t const*>(cur4 + i + 4 * 4) : 0; u.x = __funnelshift_rc(u.x, u.y, bitpos); u.y = __funnelshift_rc(u.y, u.z, bitpos); u.z = __funnelshift_rc(u.z, u.w, bitpos); u.w = __funnelshift_rc(u.w, v, bitpos); } *reinterpret_cast<uint4*>(out + i) = u; } } cur += fast_bytes; out += fast_bytes; len -= fast_bytes; // Slow copy for remaining bytes for (int i = t; i < len; i += blockDim.x) { if (out + i < outend) { out[i] = cur[i]; // Input range has already been validated in init_stored() } } out += len; __syncthreads(); if (t == 0) { // Reset bitstream to end of block auto p = cur + len; auto prefix_bytes = (uint32_t)(((size_t)p) & 3); p -= prefix_bytes; s->cur = p; s->bitbuf.x = (p < s->end) ? *reinterpret_cast<uint32_t const*>(p) : 0; p += 4; s->bitbuf.y = (p < s->end) ? *reinterpret_cast<uint32_t const*>(p) : 0; s->bitpos = prefix_bytes * 8; s->out = out; } } #if ENABLE_PREFETCH __device__ void init_prefetcher(inflate_state_s* s, int t) { if (t == 0) { s->pref.cur_p = s->cur; s->pref.run = 1; } } __device__ void prefetch_warp(volatile inflate_state_s* s, int t) { uint8_t const* cur_p = s->pref.cur_p; uint8_t const* end = s->end; while (shuffle((t == 0) ? s->pref.run : 0)) { auto cur_lo = (int32_t)(size_t)cur_p; int do_pref = shuffle((t == 0) ? (cur_lo - *(volatile int32_t*)&s->cur < prefetch_size - 32 * 4 - 4) : 0); if (do_pref) { uint8_t const* p = cur_p + 4 * t; *prefetch_addr32(s->pref, p) = (p < end) ? 
*reinterpret_cast<uint32_t const*>(p) : 0; cur_p += 4 * 32; __threadfence_block(); __syncwarp(); if (!t) { s->pref.cur_p = cur_p; __threadfence_block(); } } } } #endif // ENABLE_PREFETCH /** * @brief Parse GZIP header * See https://tools.ietf.org/html/rfc1952 */ __device__ int parse_gzip_header(uint8_t const* src, size_t src_size) { int hdr_len = -1; if (src_size >= 18) { uint32_t sig = (src[0] << 16) | (src[1] << 8) | src[2]; if (sig == 0x1f'8b08) // 24-bit GZIP inflate signature {0x1f, 0x8b, 0x08} { uint8_t flags = src[3]; hdr_len = 10; if (flags & GZIPHeaderFlag::fextra) // Extra fields present { int xlen = src[hdr_len] | (src[hdr_len + 1] << 8); hdr_len += xlen; if (hdr_len >= src_size) return -1; } if (flags & GZIPHeaderFlag::fname) // Original file name present { // Skip zero-terminated string do { if (hdr_len >= src_size) return -1; } while (src[hdr_len++] != 0); } if (flags & GZIPHeaderFlag::fcomment) // Comment present { // Skip zero-terminated string do { if (hdr_len >= src_size) return -1; } while (src[hdr_len++] != 0); } if (flags & GZIPHeaderFlag::fhcrc) // Header CRC present { hdr_len += 2; } if (hdr_len + 8 >= src_size) hdr_len = -1; } } return hdr_len; } /** * @brief INFLATE decompression kernel * * blockDim {block_size,1,1} * * @tparam block_size Thread block dimension for this call * @param inputs Source buffer information per block * @param outputs Destination buffer information per block * @param results Decompression status buffer per block * @param parse_hdr Indicates whether the compressed bitstream includes a GZIP header to parse */ template <int block_size> __global__ void __launch_bounds__(block_size) inflate_kernel(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, gzip_header_included parse_hdr) { __shared__ __align__(16) inflate_state_s state_g; int t = threadIdx.x; int z = blockIdx.x; inflate_state_s* state = &state_g; if (!t) { auto p = inputs[z].data(); auto src_size = inputs[z].size(); // Parse header if needed state->err = 0; if (parse_hdr == gzip_header_included::YES) { int hdr_len = parse_gzip_header(p, src_size); src_size = (src_size >= 8) ? src_size - 8 : 0; // ignore footer if (hdr_len >= 0) { p += hdr_len; src_size -= hdr_len; } else { state->err = hdr_len; } } // Initialize shared state state->out = outputs[z].data(); state->outbase = state->out; state->outend = state->out + outputs[z].size(); state->end = p + src_size; auto const prefix_bytes = (uint32_t)(((size_t)p) & 3); p -= prefix_bytes; state->cur = p; state->bitbuf.x = (p < state->end) ? *reinterpret_cast<uint32_t const*>(p) : 0; p += 4; state->bitbuf.y = (p < state->end) ? 
*reinterpret_cast<uint32_t const*>(p) : 0; state->bitpos = prefix_bytes * 8; } __syncthreads(); // Main loop decoding blocks while (!state->err) { if (!t) { // Thread0: read last flag, block type and custom huffman tables if any if (state->cur + (state->bitpos >> 3) >= state->end) state->err = 2; else { state->blast = getbits(state, 1); state->btype = getbits(state, 2); if (state->btype == 0) state->err = init_stored(state); else if (state->btype == 1) state->err = init_fixed(state); else if (state->btype == 2) state->err = init_dynamic(state); else state->err = -1; // Invalid block } } __syncthreads(); if (!state->err && (state->btype == 1 || state->btype == 2)) { // Initializes lookup tables (block wide) init_length_lut(state, t); init_distance_lut(state, t); #if ENABLE_PREFETCH // Initialize prefetcher init_prefetcher(state, t); #endif if (t < batch_count) { state->x.batch_len[t] = 0; } __syncthreads(); // decode data until end-of-block code if (t < 1 * 32) { // WARP0: decode variable-length symbols if (!t) { // Thread0: decode symbols (single threaded) decode_symbols(state); #if ENABLE_PREFETCH state->pref.run = 0; #endif } } else if (t < 2 * 32) { // WARP1: perform LZ77 using length and distance codes from WARP0 process_symbols(state, t & 0x1f); } #if ENABLE_PREFETCH else if (t < 3 * 32) { // WARP2: Prefetcher: prefetch data for WARP0 prefetch_warp(state, t & 0x1f); } #endif // else WARP3: idle } else if (!state->err && state->btype == 0) { // Uncompressed block (block-wide memcpy) copy_stored(state, t); } if (state->blast) break; __syncthreads(); } __syncthreads(); // Output decompression status and length if (!t) { if (state->err == 0 && state->cur + ((state->bitpos + 7) >> 3) > state->end) { // Read past the end of the input buffer state->err = 2; } else if (state->err == 0 && state->out > state->outend) { // Output buffer too small state->err = 1; } results[z].bytes_written = state->out - state->outbase; results[z].status = [&]() { switch (state->err) { case 0: return compression_status::SUCCESS; case 1: return compression_status::OUTPUT_OVERFLOW; default: return compression_status::FAILURE; } }(); results[z].reserved = (int)(state->end - state->cur); // Here mainly for debug purposes } } /** * @brief Copy a group of buffers * * blockDim {1024,1,1} * * @param inputs Source buffer information per block * @param outputs Destination buffer information per block */ __global__ void __launch_bounds__(1024) copy_uncompressed_kernel(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs) { __shared__ uint8_t const* volatile src_g; __shared__ uint8_t* volatile dst_g; __shared__ uint32_t volatile copy_len_g; uint32_t t = threadIdx.x; uint32_t z = blockIdx.x; uint8_t const* src; uint8_t* dst; uint32_t len, src_align_bytes, src_align_bits, dst_align_bytes; if (!t) { src = inputs[z].data(); dst = outputs[z].data(); len = static_cast<uint32_t>(min(inputs[z].size(), outputs[z].size())); src_g = src; dst_g = dst; copy_len_g = len; } __syncthreads(); src = src_g; dst = dst_g; len = copy_len_g; // Align output to 32-bit dst_align_bytes = 3 & -reinterpret_cast<intptr_t>(dst); if (dst_align_bytes != 0) { uint32_t align_len = min(dst_align_bytes, len); if (t < align_len) { dst[t] = src[t]; } src += align_len; dst += align_len; len -= align_len; } src_align_bytes = (uint32_t)(3 & reinterpret_cast<uintptr_t>(src)); src_align_bits = src_align_bytes << 3; while (len >= 32) { auto const* src32 = reinterpret_cast<uint32_t const*>(src - src_align_bytes); uint32_t copy_cnt = min(len >> 2, 1024); if (t < 
copy_cnt) { uint32_t v = src32[t]; if (src_align_bits != 0) { v = __funnelshift_r(v, src32[t + 1], src_align_bits); } reinterpret_cast<uint32_t*>(dst)[t] = v; } src += copy_cnt * 4; dst += copy_cnt * 4; len -= copy_cnt * 4; } if (t < len) { dst[t] = src[t]; } } void gpuinflate(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, gzip_header_included parse_hdr, rmm::cuda_stream_view stream) { constexpr int block_size = 128; // Threads per block if (inputs.size() > 0) { inflate_kernel<block_size> <<<inputs.size(), block_size, 0, stream.value()>>>(inputs, outputs, results, parse_hdr); } } void gpu_copy_uncompressed_blocks(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, rmm::cuda_stream_view stream) { if (inputs.size() > 0) { copy_uncompressed_kernel<<<inputs.size(), 1024, 0, stream.value()>>>(inputs, outputs); } } } // namespace io } // namespace cudf
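Editor's addendum (not part of the original file): a minimal host-side sketch of driving gpuinflate() for a single raw-DEFLATE chunk, to make the batched interface above concrete. The function name decompress_one_deflate and the d_in/d_out device pointers are hypothetical; the batch descriptors must live in device memory because the kernel dereferences the spans on the GPU. It assumes cudf::device_span converts from rmm::device_uvector and that gzip_header_included exposes a NO value alongside the YES checked in inflate_kernel; error checking and stream synchronization are omitted.

#include "gpuinflate.hpp"

#include <cudf/utilities/span.hpp>
#include <rmm/cuda_stream_view.hpp>
#include <rmm/device_uvector.hpp>

namespace cudf::io {

// Hypothetical helper: decompress one raw DEFLATE buffer already resident on the GPU
void decompress_one_deflate(uint8_t const* d_in, size_t in_size,
                            uint8_t* d_out, size_t out_size,
                            rmm::cuda_stream_view stream)
{
  // One input/output pair; inflate_kernel launches one thread block per chunk
  rmm::device_uvector<device_span<uint8_t const>> inputs(1, stream);
  rmm::device_uvector<device_span<uint8_t>> outputs(1, stream);
  rmm::device_uvector<compression_result> results(1, stream);
  auto const in_span  = device_span<uint8_t const>{d_in, in_size};
  auto const out_span = device_span<uint8_t>{d_out, out_size};
  // Copy the span descriptors themselves into device memory
  cudaMemcpyAsync(inputs.data(), &in_span, sizeof(in_span), cudaMemcpyHostToDevice, stream.value());
  cudaMemcpyAsync(outputs.data(), &out_span, sizeof(out_span), cudaMemcpyHostToDevice, stream.value());
  // Raw DEFLATE stream: no GZIP header or footer to strip
  gpuinflate(inputs, outputs, results, gzip_header_included::NO, stream);
}

}  // namespace cudf::io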
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/snap.cu
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "gpuinflate.hpp" #include <io/utilities/block_utils.cuh> #include <rmm/cuda_stream_view.hpp> namespace cudf { namespace io { constexpr int hash_bits = 12; // TBD: Tentatively limits to 2-byte codes to prevent long copy search followed by long literal // encoding /** * @brief snappy compressor state */ struct snap_state_s { uint8_t const* src; ///< Ptr to uncompressed data uint32_t src_len; ///< Uncompressed data length uint8_t* dst_base; ///< Base ptr to output compressed data uint8_t* dst; ///< Current ptr to output compressed data uint8_t* end; ///< End of output compressed data buffer volatile uint32_t literal_length; ///< Number of literal bytes volatile uint32_t copy_length; ///< Number of copy bytes volatile uint32_t copy_distance; ///< Distance for copy bytes uint16_t hash_map[1 << hash_bits]; ///< Low 16-bit offset from hash }; /** * @brief 12-bit hash from four consecutive bytes */ static inline __device__ uint32_t snap_hash(uint32_t v) { return (v * ((1 << 20) + (0x2a00) + (0x6a) + 1)) >> (32 - hash_bits); } /** * @brief Fetches four consecutive bytes */ static inline __device__ uint32_t fetch4(uint8_t const* src) { uint32_t src_align = 3 & reinterpret_cast<uintptr_t>(src); auto const* src32 = reinterpret_cast<uint32_t const*>(src - src_align); uint32_t v = src32[0]; return (src_align) ? 
__funnelshift_r(v, src32[1], src_align * 8) : v; } /** * @brief Outputs a snappy literal symbol * * @param dst Destination compressed byte stream * @param end End of compressed data buffer * @param src Pointer to literal bytes * @param len_minus1 Number of literal bytes minus 1 * @param t Thread in warp * * @return Updated pointer to compressed byte stream */ static __device__ uint8_t* StoreLiterals( uint8_t* dst, uint8_t* end, uint8_t const* src, uint32_t len_minus1, uint32_t t) { if (len_minus1 < 60) { if (!t && dst < end) dst[0] = (len_minus1 << 2); dst += 1; } else if (len_minus1 <= 0xff) { if (!t && dst + 1 < end) { dst[0] = 60 << 2; dst[1] = len_minus1; } dst += 2; } else if (len_minus1 <= 0xffff) { if (!t && dst + 2 < end) { dst[0] = 61 << 2; dst[1] = len_minus1; dst[2] = len_minus1 >> 8; } dst += 3; } else if (len_minus1 <= 0xff'ffff) { if (!t && dst + 3 < end) { dst[0] = 62 << 2; dst[1] = len_minus1; dst[2] = len_minus1 >> 8; dst[3] = len_minus1 >> 16; } dst += 4; } else { if (!t && dst + 4 < end) { dst[0] = 63 << 2; dst[1] = len_minus1; dst[2] = len_minus1 >> 8; dst[3] = len_minus1 >> 16; dst[4] = len_minus1 >> 24; } dst += 5; } for (uint32_t i = t; i <= len_minus1; i += 32) { if (dst + i < end) dst[i] = src[i]; } return dst + len_minus1 + 1; } /** * @brief Outputs a snappy copy symbol (assumed to be called by a single thread) * * @param dst Destination compressed byte stream * @param end End of compressed data buffer * @param copy_len Copy length * @param distance Copy distance * * @return Updated pointer to compressed byte stream */ static __device__ uint8_t* StoreCopy(uint8_t* dst, uint8_t* end, uint32_t copy_len, uint32_t distance) { if (copy_len < 12 && distance < 2048) { // xxxxxx01.oooooooo: copy with 3-bit length, 11-bit offset if (dst + 2 <= end) { dst[0] = ((distance & 0x700) >> 3) | ((copy_len - 4) << 2) | 0x01; dst[1] = distance; } return dst + 2; } else { // xxxxxx1x: copy with 6-bit length, 16-bit offset if (dst + 3 <= end) { dst[0] = ((copy_len - 1) << 2) | 0x2; dst[1] = distance; dst[2] = distance >> 8; } return dst + 3; } } /** * @brief Returns mask of any thread in the warp that has a hash value * equal to that of the calling thread */ static inline __device__ uint32_t HashMatchAny(uint32_t v, uint32_t t) { #if (__CUDA_ARCH__ >= 700) return __match_any_sync(~0, v); #else uint32_t err_map = 0; for (uint32_t i = 0; i < hash_bits; i++, v >>= 1) { uint32_t b = v & 1; uint32_t match_b = ballot(b); err_map |= match_b ^ -(int32_t)b; } return ~err_map; #endif } /** * @brief Finds the first occurrence of a consecutive 4-byte match in the input sequence, * or at most 256 bytes * * @param s Compressor state (copy_length set to 4 if a match is found, zero otherwise) * @param src Uncompressed buffer * @param pos0 Position in uncompressed buffer * @param t thread in warp * * @return Number of bytes before first match (literal length) */ static __device__ uint32_t FindFourByteMatch(snap_state_s* s, uint8_t const* src, uint32_t pos0, uint32_t t) { constexpr int max_literal_length = 256; // Matches encoder limit as described in snappy format description constexpr int max_copy_distance = 32768; uint32_t len = s->src_len; uint32_t pos = pos0; uint32_t maxpos = pos0 + max_literal_length - 31; uint32_t match_mask, literal_cnt; if (t == 0) { s->copy_length = 0; } do { bool valid4 = (pos + t + 4 <= len); uint32_t data32 = (valid4) ? fetch4(src + pos + t) : 0; uint32_t hash = (valid4) ? 
snap_hash(data32) : 0; uint32_t local_match = HashMatchAny(hash, t); uint32_t local_match_lane = 31 - __clz(local_match & ((1 << t) - 1)); uint32_t local_match_data = shuffle(data32, min(local_match_lane, t)); uint32_t offset, match; if (valid4) { if (local_match_lane < t && local_match_data == data32) { match = 1; offset = pos + local_match_lane; } else { offset = (pos & ~0xffff) | s->hash_map[hash]; if (offset >= pos) { offset = (offset >= 0x1'0000) ? offset - 0x1'0000 : pos; } match = (offset < pos && offset + max_copy_distance >= pos + t && fetch4(src + offset) == data32); } } else { match = 0; local_match = 0; offset = pos + t; } match_mask = ballot(match); if (match_mask != 0) { literal_cnt = __ffs(match_mask) - 1; if (t == literal_cnt) { s->copy_distance = pos + t - offset; s->copy_length = 4; } } else { literal_cnt = 32; } // Update hash up to the first 4 bytes of the copy length local_match &= (0x2 << literal_cnt) - 1; if (t <= literal_cnt && t == 31 - __clz(local_match)) { s->hash_map[hash] = pos + t; } pos += literal_cnt; } while (literal_cnt == 32 && pos < maxpos); return min(pos, len) - pos0; } /// @brief Returns the number of matching bytes for two byte sequences up to 63 bytes static __device__ uint32_t Match60(uint8_t const* src1, uint8_t const* src2, uint32_t len, uint32_t t) { uint32_t mismatch = ballot(t >= len || src1[t] != src2[t]); if (mismatch == 0) { mismatch = ballot(32 + t >= len || src1[32 + t] != src2[32 + t]); return 31 + __ffs(mismatch); // mismatch cannot be zero here if len <= 63 } else { return __ffs(mismatch) - 1; } } /** * @brief Snappy compression kernel * See http://github.com/google/snappy/blob/master/format_description.txt * * blockDim {128,1,1} * * @param[in] inputs Source/Destination buffer information per block * @param[out] outputs Compression status per block * @param[in] count Number of blocks to compress */ __global__ void __launch_bounds__(128) snap_kernel(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results) { __shared__ __align__(16) snap_state_s state_g; snap_state_s* const s = &state_g; uint32_t t = threadIdx.x; uint32_t pos; uint8_t const* src; if (!t) { auto const src = inputs[blockIdx.x].data(); auto src_len = static_cast<uint32_t>(inputs[blockIdx.x].size()); auto dst = outputs[blockIdx.x].data(); auto const dst_len = static_cast<uint32_t>(outputs[blockIdx.x].size()); auto const end = dst + dst_len; s->src = src; s->src_len = src_len; s->dst_base = dst; s->end = end; while (src_len > 0x7f) { if (dst < end) { dst[0] = src_len | 0x80; } dst++; src_len >>= 7; } if (dst < end) { dst[0] = src_len; } s->dst = dst + 1; s->literal_length = 0; s->copy_length = 0; s->copy_distance = 0; } for (uint32_t i = t; i < sizeof(s->hash_map) / sizeof(uint32_t); i += 128) { *reinterpret_cast<volatile uint32_t*>(&s->hash_map[i * 2]) = 0; } __syncthreads(); src = s->src; pos = 0; while (pos < s->src_len) { uint32_t literal_len = s->literal_length; uint32_t copy_len = s->copy_length; uint32_t distance = s->copy_distance; __syncthreads(); if (t < 32) { // WARP0: Encode literals and copies uint8_t* dst = s->dst; uint8_t* end = s->end; if (literal_len > 0) { dst = StoreLiterals(dst, end, src + pos, literal_len - 1, t); pos += literal_len; } if (copy_len > 0) { if (t == 0) { dst = StoreCopy(dst, end, copy_len, distance); } pos += copy_len; } __syncwarp(); if (t == 0) { s->dst = dst; } } else { pos += literal_len + copy_len; if (t < 32 * 2) { // WARP1: Find a match using 
12-bit hashes of 4-byte blocks uint32_t t5 = t & 0x1f; literal_len = FindFourByteMatch(s, src, pos, t5); if (t5 == 0) { s->literal_length = literal_len; } copy_len = s->copy_length; if (copy_len != 0) { uint32_t match_pos = pos + literal_len + copy_len; // NOTE: copy_len is always 4 here copy_len += Match60(src + match_pos, src + match_pos - s->copy_distance, min(s->src_len - match_pos, 64 - copy_len), t5); if (t5 == 0) { s->copy_length = copy_len; } } } } __syncthreads(); } __syncthreads(); if (!t) { results[blockIdx.x].bytes_written = s->dst - s->dst_base; results[blockIdx.x].status = (s->dst > s->end) ? compression_status::FAILURE : compression_status::SUCCESS; results[blockIdx.x].reserved = 0; } } void gpu_snap(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, rmm::cuda_stream_view stream) { dim3 dim_block(128, 1); // 4 warps per stream, 1 stream per block dim3 dim_grid(inputs.size(), 1); if (inputs.size() > 0) { snap_kernel<<<dim_grid, dim_block, 0, stream.value()>>>(inputs, outputs, results); } } } // namespace io } // namespace cudf
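Editor's addendum (not part of the original file): the preamble loop at the top of snap_kernel stores the uncompressed length as a little-endian base-128 varint (7 payload bits per byte, with the high bit set on every byte except the last), matching the snappy framing. The sketch below shows how that value can be read back on the host; the helper name read_uncompressed_length is hypothetical.

#include <cstddef>
#include <cstdint>

// Decode the varint-encoded uncompressed length written by snap_kernel.
// Returns the number of preamble bytes consumed, or 0 if truncated/malformed.
size_t read_uncompressed_length(uint8_t const* src, size_t size, uint32_t* out_len)
{
  uint32_t v = 0;
  for (size_t i = 0; i < size && i < 5; ++i) {  // a 32-bit length spans at most 5 varint bytes
    v |= static_cast<uint32_t>(src[i] & 0x7f) << (7 * i);
    if ((src[i] & 0x80) == 0) {  // continuation bit clear: final byte of the varint
      *out_len = v;
      return i + 1;
    }
  }
  return 0;
}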
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/nvcomp_adapter.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nvcomp_adapter.cuh" #include <cudf/detail/utilities/integer_utils.hpp> #include <rmm/exec_policy.hpp> #include <thrust/iterator/zip_iterator.h> #include <thrust/transform.h> #include <thrust/tuple.h> namespace cudf::io::nvcomp { batched_args create_batched_nvcomp_args(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, rmm::cuda_stream_view stream) { auto const num_comp_chunks = inputs.size(); rmm::device_uvector<void const*> input_data_ptrs(num_comp_chunks, stream); rmm::device_uvector<size_t> input_data_sizes(num_comp_chunks, stream); rmm::device_uvector<void*> output_data_ptrs(num_comp_chunks, stream); rmm::device_uvector<size_t> output_data_sizes(num_comp_chunks, stream); // Prepare the input vectors auto ins_it = thrust::make_zip_iterator(input_data_ptrs.begin(), input_data_sizes.begin()); thrust::transform( rmm::exec_policy(stream), inputs.begin(), inputs.end(), ins_it, [] __device__(auto const& in) { return thrust::make_tuple(in.data(), in.size()); }); // Prepare the output vectors auto outs_it = thrust::make_zip_iterator(output_data_ptrs.begin(), output_data_sizes.begin()); thrust::transform( rmm::exec_policy(stream), outputs.begin(), outputs.end(), outs_it, [] __device__(auto const& out) { return thrust::make_tuple(out.data(), out.size()); }); return {std::move(input_data_ptrs), std::move(input_data_sizes), std::move(output_data_ptrs), std::move(output_data_sizes)}; } void update_compression_results(device_span<nvcompStatus_t const> nvcomp_stats, device_span<size_t const> actual_output_sizes, device_span<compression_result> results, rmm::cuda_stream_view stream) { thrust::transform_if( rmm::exec_policy(stream), nvcomp_stats.begin(), nvcomp_stats.end(), actual_output_sizes.begin(), results.begin(), results.begin(), [] __device__(auto const& nvcomp_status, auto const& size) { return compression_result{size, nvcomp_status == nvcompStatus_t::nvcompSuccess ? 
compression_status::SUCCESS : compression_status::FAILURE}; }, [] __device__(auto const& cudf_status) { return cudf_status.status != compression_status::SKIPPED; }); } void update_compression_results(device_span<size_t const> actual_output_sizes, device_span<compression_result> results, rmm::cuda_stream_view stream) { thrust::transform_if( rmm::exec_policy(stream), actual_output_sizes.begin(), actual_output_sizes.end(), results.begin(), results.begin(), [] __device__(auto const& size) { return compression_result{size}; }, [] __device__(auto const& results) { return results.status != compression_status::SKIPPED; }); } void skip_unsupported_inputs(device_span<size_t> input_sizes, device_span<compression_result> results, std::optional<size_t> max_valid_input_size, rmm::cuda_stream_view stream) { if (max_valid_input_size.has_value()) { auto status_size_it = thrust::make_zip_iterator(input_sizes.begin(), results.begin()); thrust::transform_if( rmm::exec_policy(stream), results.begin(), results.end(), input_sizes.begin(), status_size_it, [] __device__(auto const& status) { return thrust::pair{0, compression_result{0, compression_status::SKIPPED}}; }, [max_size = max_valid_input_size.value()] __device__(size_t input_size) { return input_size > max_size; }); } } std::pair<size_t, size_t> max_chunk_and_total_input_size(device_span<size_t const> input_sizes, rmm::cuda_stream_view stream) { auto const max = thrust::reduce(rmm::exec_policy(stream), input_sizes.begin(), input_sizes.end(), 0ul, thrust::maximum<size_t>()); auto const sum = thrust::reduce(rmm::exec_policy(stream), input_sizes.begin(), input_sizes.end()); return {max, sum}; } } // namespace cudf::io::nvcomp
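Editor's addendum (not part of the original file): create_batched_nvcomp_args above fills the pointer and size vectors with a single thrust::transform writing through a zip iterator, so the spans are traversed once instead of twice. A self-contained sketch of the same unzip idiom on toy data; the function name and the squares/cubes outputs are illustrative only.

#include <rmm/cuda_stream_view.hpp>
#include <rmm/device_uvector.hpp>
#include <rmm/exec_policy.hpp>

#include <thrust/iterator/zip_iterator.h>
#include <thrust/transform.h>
#include <thrust/tuple.h>

// Fill two output arrays from one input array in a single fused pass:
// assigning a tuple through the zip iterator scatters its elements into
// the two destination sequences.
void squares_and_cubes(rmm::device_uvector<int> const& in,
                       rmm::device_uvector<int>& squares,
                       rmm::device_uvector<int>& cubes,
                       rmm::cuda_stream_view stream)
{
  auto out_it = thrust::make_zip_iterator(squares.begin(), cubes.begin());
  thrust::transform(rmm::exec_policy(stream), in.begin(), in.end(), out_it,
                    [] __device__(int x) { return thrust::make_tuple(x * x, x * x * x); });
}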
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/brotli_tables.hpp
/* * Copyright (c) 2018-2022, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Portions of this file are derived from Google's Brotli project at * https://github.com/google/brotli, original license text below. */ /* Copyright 2013 Google Inc. All Rights Reserved. Distributed under MIT license. See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ /* Copyright(c) 2009, 2010, 2013 - 2016 by the Brotli Authors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #pragma once #ifndef CONSTANT #define CONSTANT static const #endif constexpr int brotli_num_block_len_symbols = 26; constexpr int brotli_num_literal_symbols = 256; constexpr int brotli_num_command_symbols = 704; CONSTANT uint8_t kReverseBits[1 << 8] = { 0x00, 0x80, 0x40, 0xC0, 0x20, 0xA0, 0x60, 0xE0, 0x10, 0x90, 0x50, 0xD0, 0x30, 0xB0, 0x70, 0xF0, 0x08, 0x88, 0x48, 0xC8, 0x28, 0xA8, 0x68, 0xE8, 0x18, 0x98, 0x58, 0xD8, 0x38, 0xB8, 0x78, 0xF8, 0x04, 0x84, 0x44, 0xC4, 0x24, 0xA4, 0x64, 0xE4, 0x14, 0x94, 0x54, 0xD4, 0x34, 0xB4, 0x74, 0xF4, 0x0C, 0x8C, 0x4C, 0xCC, 0x2C, 0xAC, 0x6C, 0xEC, 0x1C, 0x9C, 0x5C, 0xDC, 0x3C, 0xBC, 0x7C, 0xFC, 0x02, 0x82, 0x42, 0xC2, 0x22, 0xA2, 0x62, 0xE2, 0x12, 0x92, 0x52, 0xD2, 0x32, 0xB2, 0x72, 0xF2, 0x0A, 0x8A, 0x4A, 0xCA, 0x2A, 0xAA, 0x6A, 0xEA, 0x1A, 0x9A, 0x5A, 0xDA, 0x3A, 0xBA, 0x7A, 0xFA, 0x06, 0x86, 0x46, 0xC6, 0x26, 0xA6, 0x66, 0xE6, 0x16, 0x96, 0x56, 0xD6, 0x36, 0xB6, 0x76, 0xF6, 0x0E, 0x8E, 0x4E, 0xCE, 0x2E, 0xAE, 0x6E, 0xEE, 0x1E, 0x9E, 0x5E, 0xDE, 0x3E, 0xBE, 0x7E, 0xFE, 0x01, 0x81, 0x41, 0xC1, 0x21, 0xA1, 0x61, 0xE1, 0x11, 0x91, 0x51, 0xD1, 0x31, 0xB1, 0x71, 0xF1, 0x09, 0x89, 0x49, 0xC9, 0x29, 0xA9, 0x69, 0xE9, 0x19, 0x99, 0x59, 0xD9, 0x39, 0xB9, 0x79, 0xF9, 0x05, 0x85, 0x45, 0xC5, 0x25, 0xA5, 0x65, 0xE5, 0x15, 0x95, 0x55, 0xD5, 0x35, 0xB5, 0x75, 0xF5, 0x0D, 0x8D, 0x4D, 0xCD, 0x2D, 0xAD, 0x6D, 0xED, 0x1D, 0x9D, 0x5D, 0xDD, 0x3D, 0xBD, 0x7D, 0xFD, 0x03, 0x83, 0x43, 0xC3, 0x23, 0xA3, 0x63, 0xE3, 0x13, 0x93, 0x53, 0xD3, 0x33, 0xB3, 0x73, 0xF3, 0x0B, 0x8B, 0x4B, 0xCB, 0x2B, 0xAB, 0x6B, 0xEB, 0x1B, 0x9B, 0x5B, 0xDB, 0x3B, 0xBB, 0x7B, 0xFB, 0x07, 0x87, 0x47, 0xC7, 0x27, 0xA7, 0x67, 0xE7, 0x17, 0x97, 0x57, 0xD7, 0x37, 0xB7, 0x77, 0xF7, 0x0F, 0x8F, 0x4F, 0xCF, 0x2F, 0xAF, 0x6F, 0xEF, 0x1F, 0x9F, 0x5F, 0xDF, 0x3F, 0xBF, 0x7F, 0xFF}; /* Common context lookup table for all context modes. */ CONSTANT uint8_t kContextLookup[2048] = { /* CONTEXT_LSB6, last byte. 
*/ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, /* CONTEXT_LSB6, second last byte, */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* CONTEXT_MSB6, last byte. 
*/ 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 43, 44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 49, 49, 49, 49, 50, 50, 50, 50, 51, 51, 51, 51, 52, 52, 52, 52, 53, 53, 53, 53, 54, 54, 54, 54, 55, 55, 55, 55, 56, 56, 56, 56, 57, 57, 57, 57, 58, 58, 58, 58, 59, 59, 59, 59, 60, 60, 60, 60, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63, 63, /* CONTEXT_MSB6, second last byte, */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* CONTEXT_UTF8, last byte. */ /* ASCII range. */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 12, 16, 12, 12, 20, 12, 16, 24, 28, 12, 12, 32, 12, 36, 12, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 32, 32, 24, 40, 28, 12, 12, 48, 52, 52, 52, 48, 52, 52, 52, 48, 52, 52, 52, 52, 52, 48, 52, 52, 52, 52, 52, 48, 52, 52, 52, 52, 52, 24, 12, 28, 12, 12, 12, 56, 60, 60, 60, 56, 60, 60, 60, 56, 60, 60, 60, 60, 60, 56, 60, 60, 60, 60, 60, 56, 60, 60, 60, 60, 60, 24, 12, 28, 12, 0, /* UTF8 continuation byte range. */ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, /* UTF8 lead byte range. */ 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, /* CONTEXT_UTF8 second last byte. */ /* ASCII range. */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 0, /* UTF8 continuation byte range. */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* UTF8 lead byte range. 
*/ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, /* CONTEXT_SIGNED, last byte, same as the above values shifted by 3 bits. */ 0, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 56, /* CONTEXT_SIGNED, second last byte. */ 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, }; using CmdLutElement = struct CmdLutElement { uint8_t insert_len_extra_bits; uint8_t copy_len_extra_bits; int8_t distance_code; uint8_t context; uint16_t insert_len_offset; uint16_t copy_len_offset; }; CONSTANT CmdLutElement kCmdLut[brotli_num_command_symbols] = { {0x00, 0x00, 0, 0x00, 0x0000, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0000, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0000, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0000, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0000, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0000, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0000, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0000, 0x0009}, {0x00, 0x00, 0, 0x00, 0x0001, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0001, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0001, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0001, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0001, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0001, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0001, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0001, 0x0009}, {0x00, 0x00, 0, 0x00, 0x0002, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0002, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0002, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0002, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0002, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0002, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0002, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0002, 0x0009}, {0x00, 0x00, 0, 0x00, 0x0003, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0003, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0003, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0003, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0003, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0003, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0003, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0003, 0x0009}, 
{0x00, 0x00, 0, 0x00, 0x0004, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0004, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0004, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0004, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0004, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0004, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0004, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0004, 0x0009}, {0x00, 0x00, 0, 0x00, 0x0005, 0x0002}, {0x00, 0x00, 0, 0x01, 0x0005, 0x0003}, {0x00, 0x00, 0, 0x02, 0x0005, 0x0004}, {0x00, 0x00, 0, 0x03, 0x0005, 0x0005}, {0x00, 0x00, 0, 0x03, 0x0005, 0x0006}, {0x00, 0x00, 0, 0x03, 0x0005, 0x0007}, {0x00, 0x00, 0, 0x03, 0x0005, 0x0008}, {0x00, 0x00, 0, 0x03, 0x0005, 0x0009}, {0x01, 0x00, 0, 0x00, 0x0006, 0x0002}, {0x01, 0x00, 0, 0x01, 0x0006, 0x0003}, {0x01, 0x00, 0, 0x02, 0x0006, 0x0004}, {0x01, 0x00, 0, 0x03, 0x0006, 0x0005}, {0x01, 0x00, 0, 0x03, 0x0006, 0x0006}, {0x01, 0x00, 0, 0x03, 0x0006, 0x0007}, {0x01, 0x00, 0, 0x03, 0x0006, 0x0008}, {0x01, 0x00, 0, 0x03, 0x0006, 0x0009}, {0x01, 0x00, 0, 0x00, 0x0008, 0x0002}, {0x01, 0x00, 0, 0x01, 0x0008, 0x0003}, {0x01, 0x00, 0, 0x02, 0x0008, 0x0004}, {0x01, 0x00, 0, 0x03, 0x0008, 0x0005}, {0x01, 0x00, 0, 0x03, 0x0008, 0x0006}, {0x01, 0x00, 0, 0x03, 0x0008, 0x0007}, {0x01, 0x00, 0, 0x03, 0x0008, 0x0008}, {0x01, 0x00, 0, 0x03, 0x0008, 0x0009}, {0x00, 0x01, 0, 0x03, 0x0000, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0000, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0000, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0000, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0000, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0000, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0000, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0000, 0x0036}, {0x00, 0x01, 0, 0x03, 0x0001, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0001, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0001, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0001, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0001, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0001, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0001, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0001, 0x0036}, {0x00, 0x01, 0, 0x03, 0x0002, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0002, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0002, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0002, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0002, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0002, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0002, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0002, 0x0036}, {0x00, 0x01, 0, 0x03, 0x0003, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0003, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0003, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0003, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0003, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0003, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0003, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0003, 0x0036}, {0x00, 0x01, 0, 0x03, 0x0004, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0004, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0004, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0004, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0004, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0004, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0004, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0004, 0x0036}, {0x00, 0x01, 0, 0x03, 0x0005, 0x000a}, {0x00, 0x01, 0, 0x03, 0x0005, 0x000c}, {0x00, 0x02, 0, 0x03, 0x0005, 0x000e}, {0x00, 0x02, 0, 0x03, 0x0005, 0x0012}, {0x00, 0x03, 0, 0x03, 0x0005, 0x0016}, {0x00, 0x03, 0, 0x03, 0x0005, 0x001e}, {0x00, 0x04, 0, 0x03, 0x0005, 0x0026}, {0x00, 0x04, 0, 0x03, 0x0005, 0x0036}, {0x01, 0x01, 0, 0x03, 0x0006, 0x000a}, {0x01, 0x01, 0, 0x03, 0x0006, 0x000c}, {0x01, 0x02, 0, 0x03, 0x0006, 0x000e}, {0x01, 0x02, 0, 0x03, 0x0006, 0x0012}, {0x01, 0x03, 0, 0x03, 0x0006, 0x0016}, {0x01, 0x03, 0, 0x03, 0x0006, 0x001e}, {0x01, 0x04, 0, 0x03, 0x0006, 0x0026}, {0x01, 0x04, 0, 0x03, 0x0006, 0x0036}, {0x01, 0x01, 0, 0x03, 0x0008, 0x000a}, {0x01, 0x01, 0, 0x03, 0x0008, 0x000c}, {0x01, 0x02, 0, 0x03, 0x0008, 0x000e}, 
{0x01, 0x02, 0, 0x03, 0x0008, 0x0012}, {0x01, 0x03, 0, 0x03, 0x0008, 0x0016}, {0x01, 0x03, 0, 0x03, 0x0008, 0x001e}, {0x01, 0x04, 0, 0x03, 0x0008, 0x0026}, {0x01, 0x04, 0, 0x03, 0x0008, 0x0036}, {0x00, 0x00, -1, 0x00, 0x0000, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0000, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0000, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0000, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0000, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0000, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0000, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0000, 0x0009}, {0x00, 0x00, -1, 0x00, 0x0001, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0001, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0001, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0001, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0001, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0001, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0001, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0001, 0x0009}, {0x00, 0x00, -1, 0x00, 0x0002, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0002, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0002, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0002, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0002, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0002, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0002, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0002, 0x0009}, {0x00, 0x00, -1, 0x00, 0x0003, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0003, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0003, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0003, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0003, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0003, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0003, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0003, 0x0009}, {0x00, 0x00, -1, 0x00, 0x0004, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0004, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0004, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0004, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0004, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0004, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0004, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0004, 0x0009}, {0x00, 0x00, -1, 0x00, 0x0005, 0x0002}, {0x00, 0x00, -1, 0x01, 0x0005, 0x0003}, {0x00, 0x00, -1, 0x02, 0x0005, 0x0004}, {0x00, 0x00, -1, 0x03, 0x0005, 0x0005}, {0x00, 0x00, -1, 0x03, 0x0005, 0x0006}, {0x00, 0x00, -1, 0x03, 0x0005, 0x0007}, {0x00, 0x00, -1, 0x03, 0x0005, 0x0008}, {0x00, 0x00, -1, 0x03, 0x0005, 0x0009}, {0x01, 0x00, -1, 0x00, 0x0006, 0x0002}, {0x01, 0x00, -1, 0x01, 0x0006, 0x0003}, {0x01, 0x00, -1, 0x02, 0x0006, 0x0004}, {0x01, 0x00, -1, 0x03, 0x0006, 0x0005}, {0x01, 0x00, -1, 0x03, 0x0006, 0x0006}, {0x01, 0x00, -1, 0x03, 0x0006, 0x0007}, {0x01, 0x00, -1, 0x03, 0x0006, 0x0008}, {0x01, 0x00, -1, 0x03, 0x0006, 0x0009}, {0x01, 0x00, -1, 0x00, 0x0008, 0x0002}, {0x01, 0x00, -1, 0x01, 0x0008, 0x0003}, {0x01, 0x00, -1, 0x02, 0x0008, 0x0004}, {0x01, 0x00, -1, 0x03, 0x0008, 0x0005}, {0x01, 0x00, -1, 0x03, 0x0008, 0x0006}, {0x01, 0x00, -1, 0x03, 0x0008, 0x0007}, {0x01, 0x00, -1, 0x03, 0x0008, 0x0008}, {0x01, 0x00, -1, 0x03, 0x0008, 0x0009}, {0x00, 0x01, -1, 0x03, 0x0000, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0000, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0000, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0000, 0x0012}, {0x00, 0x03, -1, 0x03, 0x0000, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0000, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0000, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0000, 0x0036}, {0x00, 0x01, -1, 0x03, 0x0001, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0001, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0001, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0001, 0x0012}, {0x00, 0x03, -1, 0x03, 0x0001, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0001, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0001, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0001, 0x0036}, {0x00, 0x01, -1, 0x03, 0x0002, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0002, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0002, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0002, 0x0012}, 
{0x00, 0x03, -1, 0x03, 0x0002, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0002, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0002, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0002, 0x0036}, {0x00, 0x01, -1, 0x03, 0x0003, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0003, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0003, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0003, 0x0012}, {0x00, 0x03, -1, 0x03, 0x0003, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0003, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0003, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0003, 0x0036}, {0x00, 0x01, -1, 0x03, 0x0004, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0004, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0004, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0004, 0x0012}, {0x00, 0x03, -1, 0x03, 0x0004, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0004, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0004, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0004, 0x0036}, {0x00, 0x01, -1, 0x03, 0x0005, 0x000a}, {0x00, 0x01, -1, 0x03, 0x0005, 0x000c}, {0x00, 0x02, -1, 0x03, 0x0005, 0x000e}, {0x00, 0x02, -1, 0x03, 0x0005, 0x0012}, {0x00, 0x03, -1, 0x03, 0x0005, 0x0016}, {0x00, 0x03, -1, 0x03, 0x0005, 0x001e}, {0x00, 0x04, -1, 0x03, 0x0005, 0x0026}, {0x00, 0x04, -1, 0x03, 0x0005, 0x0036}, {0x01, 0x01, -1, 0x03, 0x0006, 0x000a}, {0x01, 0x01, -1, 0x03, 0x0006, 0x000c}, {0x01, 0x02, -1, 0x03, 0x0006, 0x000e}, {0x01, 0x02, -1, 0x03, 0x0006, 0x0012}, {0x01, 0x03, -1, 0x03, 0x0006, 0x0016}, {0x01, 0x03, -1, 0x03, 0x0006, 0x001e}, {0x01, 0x04, -1, 0x03, 0x0006, 0x0026}, {0x01, 0x04, -1, 0x03, 0x0006, 0x0036}, {0x01, 0x01, -1, 0x03, 0x0008, 0x000a}, {0x01, 0x01, -1, 0x03, 0x0008, 0x000c}, {0x01, 0x02, -1, 0x03, 0x0008, 0x000e}, {0x01, 0x02, -1, 0x03, 0x0008, 0x0012}, {0x01, 0x03, -1, 0x03, 0x0008, 0x0016}, {0x01, 0x03, -1, 0x03, 0x0008, 0x001e}, {0x01, 0x04, -1, 0x03, 0x0008, 0x0026}, {0x01, 0x04, -1, 0x03, 0x0008, 0x0036}, {0x02, 0x00, -1, 0x00, 0x000a, 0x0002}, {0x02, 0x00, -1, 0x01, 0x000a, 0x0003}, {0x02, 0x00, -1, 0x02, 0x000a, 0x0004}, {0x02, 0x00, -1, 0x03, 0x000a, 0x0005}, {0x02, 0x00, -1, 0x03, 0x000a, 0x0006}, {0x02, 0x00, -1, 0x03, 0x000a, 0x0007}, {0x02, 0x00, -1, 0x03, 0x000a, 0x0008}, {0x02, 0x00, -1, 0x03, 0x000a, 0x0009}, {0x02, 0x00, -1, 0x00, 0x000e, 0x0002}, {0x02, 0x00, -1, 0x01, 0x000e, 0x0003}, {0x02, 0x00, -1, 0x02, 0x000e, 0x0004}, {0x02, 0x00, -1, 0x03, 0x000e, 0x0005}, {0x02, 0x00, -1, 0x03, 0x000e, 0x0006}, {0x02, 0x00, -1, 0x03, 0x000e, 0x0007}, {0x02, 0x00, -1, 0x03, 0x000e, 0x0008}, {0x02, 0x00, -1, 0x03, 0x000e, 0x0009}, {0x03, 0x00, -1, 0x00, 0x0012, 0x0002}, {0x03, 0x00, -1, 0x01, 0x0012, 0x0003}, {0x03, 0x00, -1, 0x02, 0x0012, 0x0004}, {0x03, 0x00, -1, 0x03, 0x0012, 0x0005}, {0x03, 0x00, -1, 0x03, 0x0012, 0x0006}, {0x03, 0x00, -1, 0x03, 0x0012, 0x0007}, {0x03, 0x00, -1, 0x03, 0x0012, 0x0008}, {0x03, 0x00, -1, 0x03, 0x0012, 0x0009}, {0x03, 0x00, -1, 0x00, 0x001a, 0x0002}, {0x03, 0x00, -1, 0x01, 0x001a, 0x0003}, {0x03, 0x00, -1, 0x02, 0x001a, 0x0004}, {0x03, 0x00, -1, 0x03, 0x001a, 0x0005}, {0x03, 0x00, -1, 0x03, 0x001a, 0x0006}, {0x03, 0x00, -1, 0x03, 0x001a, 0x0007}, {0x03, 0x00, -1, 0x03, 0x001a, 0x0008}, {0x03, 0x00, -1, 0x03, 0x001a, 0x0009}, {0x04, 0x00, -1, 0x00, 0x0022, 0x0002}, {0x04, 0x00, -1, 0x01, 0x0022, 0x0003}, {0x04, 0x00, -1, 0x02, 0x0022, 0x0004}, {0x04, 0x00, -1, 0x03, 0x0022, 0x0005}, {0x04, 0x00, -1, 0x03, 0x0022, 0x0006}, {0x04, 0x00, -1, 0x03, 0x0022, 0x0007}, {0x04, 0x00, -1, 0x03, 0x0022, 0x0008}, {0x04, 0x00, -1, 0x03, 0x0022, 0x0009}, {0x04, 0x00, -1, 0x00, 0x0032, 0x0002}, {0x04, 0x00, -1, 0x01, 0x0032, 0x0003}, {0x04, 0x00, -1, 0x02, 0x0032, 0x0004}, {0x04, 0x00, -1, 0x03, 0x0032, 0x0005}, {0x04, 0x00, -1, 0x03, 0x0032, 
0x0006}, {0x04, 0x00, -1, 0x03, 0x0032, 0x0007}, {0x04, 0x00, -1, 0x03, 0x0032, 0x0008}, {0x04, 0x00, -1, 0x03, 0x0032, 0x0009}, {0x05, 0x00, -1, 0x00, 0x0042, 0x0002}, {0x05, 0x00, -1, 0x01, 0x0042, 0x0003}, {0x05, 0x00, -1, 0x02, 0x0042, 0x0004}, {0x05, 0x00, -1, 0x03, 0x0042, 0x0005}, {0x05, 0x00, -1, 0x03, 0x0042, 0x0006}, {0x05, 0x00, -1, 0x03, 0x0042, 0x0007}, {0x05, 0x00, -1, 0x03, 0x0042, 0x0008}, {0x05, 0x00, -1, 0x03, 0x0042, 0x0009}, {0x05, 0x00, -1, 0x00, 0x0062, 0x0002}, {0x05, 0x00, -1, 0x01, 0x0062, 0x0003}, {0x05, 0x00, -1, 0x02, 0x0062, 0x0004}, {0x05, 0x00, -1, 0x03, 0x0062, 0x0005}, {0x05, 0x00, -1, 0x03, 0x0062, 0x0006}, {0x05, 0x00, -1, 0x03, 0x0062, 0x0007}, {0x05, 0x00, -1, 0x03, 0x0062, 0x0008}, {0x05, 0x00, -1, 0x03, 0x0062, 0x0009}, {0x02, 0x01, -1, 0x03, 0x000a, 0x000a}, {0x02, 0x01, -1, 0x03, 0x000a, 0x000c}, {0x02, 0x02, -1, 0x03, 0x000a, 0x000e}, {0x02, 0x02, -1, 0x03, 0x000a, 0x0012}, {0x02, 0x03, -1, 0x03, 0x000a, 0x0016}, {0x02, 0x03, -1, 0x03, 0x000a, 0x001e}, {0x02, 0x04, -1, 0x03, 0x000a, 0x0026}, {0x02, 0x04, -1, 0x03, 0x000a, 0x0036}, {0x02, 0x01, -1, 0x03, 0x000e, 0x000a}, {0x02, 0x01, -1, 0x03, 0x000e, 0x000c}, {0x02, 0x02, -1, 0x03, 0x000e, 0x000e}, {0x02, 0x02, -1, 0x03, 0x000e, 0x0012}, {0x02, 0x03, -1, 0x03, 0x000e, 0x0016}, {0x02, 0x03, -1, 0x03, 0x000e, 0x001e}, {0x02, 0x04, -1, 0x03, 0x000e, 0x0026}, {0x02, 0x04, -1, 0x03, 0x000e, 0x0036}, {0x03, 0x01, -1, 0x03, 0x0012, 0x000a}, {0x03, 0x01, -1, 0x03, 0x0012, 0x000c}, {0x03, 0x02, -1, 0x03, 0x0012, 0x000e}, {0x03, 0x02, -1, 0x03, 0x0012, 0x0012}, {0x03, 0x03, -1, 0x03, 0x0012, 0x0016}, {0x03, 0x03, -1, 0x03, 0x0012, 0x001e}, {0x03, 0x04, -1, 0x03, 0x0012, 0x0026}, {0x03, 0x04, -1, 0x03, 0x0012, 0x0036}, {0x03, 0x01, -1, 0x03, 0x001a, 0x000a}, {0x03, 0x01, -1, 0x03, 0x001a, 0x000c}, {0x03, 0x02, -1, 0x03, 0x001a, 0x000e}, {0x03, 0x02, -1, 0x03, 0x001a, 0x0012}, {0x03, 0x03, -1, 0x03, 0x001a, 0x0016}, {0x03, 0x03, -1, 0x03, 0x001a, 0x001e}, {0x03, 0x04, -1, 0x03, 0x001a, 0x0026}, {0x03, 0x04, -1, 0x03, 0x001a, 0x0036}, {0x04, 0x01, -1, 0x03, 0x0022, 0x000a}, {0x04, 0x01, -1, 0x03, 0x0022, 0x000c}, {0x04, 0x02, -1, 0x03, 0x0022, 0x000e}, {0x04, 0x02, -1, 0x03, 0x0022, 0x0012}, {0x04, 0x03, -1, 0x03, 0x0022, 0x0016}, {0x04, 0x03, -1, 0x03, 0x0022, 0x001e}, {0x04, 0x04, -1, 0x03, 0x0022, 0x0026}, {0x04, 0x04, -1, 0x03, 0x0022, 0x0036}, {0x04, 0x01, -1, 0x03, 0x0032, 0x000a}, {0x04, 0x01, -1, 0x03, 0x0032, 0x000c}, {0x04, 0x02, -1, 0x03, 0x0032, 0x000e}, {0x04, 0x02, -1, 0x03, 0x0032, 0x0012}, {0x04, 0x03, -1, 0x03, 0x0032, 0x0016}, {0x04, 0x03, -1, 0x03, 0x0032, 0x001e}, {0x04, 0x04, -1, 0x03, 0x0032, 0x0026}, {0x04, 0x04, -1, 0x03, 0x0032, 0x0036}, {0x05, 0x01, -1, 0x03, 0x0042, 0x000a}, {0x05, 0x01, -1, 0x03, 0x0042, 0x000c}, {0x05, 0x02, -1, 0x03, 0x0042, 0x000e}, {0x05, 0x02, -1, 0x03, 0x0042, 0x0012}, {0x05, 0x03, -1, 0x03, 0x0042, 0x0016}, {0x05, 0x03, -1, 0x03, 0x0042, 0x001e}, {0x05, 0x04, -1, 0x03, 0x0042, 0x0026}, {0x05, 0x04, -1, 0x03, 0x0042, 0x0036}, {0x05, 0x01, -1, 0x03, 0x0062, 0x000a}, {0x05, 0x01, -1, 0x03, 0x0062, 0x000c}, {0x05, 0x02, -1, 0x03, 0x0062, 0x000e}, {0x05, 0x02, -1, 0x03, 0x0062, 0x0012}, {0x05, 0x03, -1, 0x03, 0x0062, 0x0016}, {0x05, 0x03, -1, 0x03, 0x0062, 0x001e}, {0x05, 0x04, -1, 0x03, 0x0062, 0x0026}, {0x05, 0x04, -1, 0x03, 0x0062, 0x0036}, {0x00, 0x05, -1, 0x03, 0x0000, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0000, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0000, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0000, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0000, 0x0146}, {0x00, 0x09, -1, 0x03, 
0x0000, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0000, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0000, 0x0846}, {0x00, 0x05, -1, 0x03, 0x0001, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0001, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0001, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0001, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0001, 0x0146}, {0x00, 0x09, -1, 0x03, 0x0001, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0001, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0001, 0x0846}, {0x00, 0x05, -1, 0x03, 0x0002, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0002, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0002, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0002, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0002, 0x0146}, {0x00, 0x09, -1, 0x03, 0x0002, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0002, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0002, 0x0846}, {0x00, 0x05, -1, 0x03, 0x0003, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0003, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0003, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0003, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0003, 0x0146}, {0x00, 0x09, -1, 0x03, 0x0003, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0003, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0003, 0x0846}, {0x00, 0x05, -1, 0x03, 0x0004, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0004, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0004, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0004, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0004, 0x0146}, {0x00, 0x09, -1, 0x03, 0x0004, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0004, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0004, 0x0846}, {0x00, 0x05, -1, 0x03, 0x0005, 0x0046}, {0x00, 0x05, -1, 0x03, 0x0005, 0x0066}, {0x00, 0x06, -1, 0x03, 0x0005, 0x0086}, {0x00, 0x07, -1, 0x03, 0x0005, 0x00c6}, {0x00, 0x08, -1, 0x03, 0x0005, 0x0146}, {0x00, 0x09, -1, 0x03, 0x0005, 0x0246}, {0x00, 0x0a, -1, 0x03, 0x0005, 0x0446}, {0x00, 0x18, -1, 0x03, 0x0005, 0x0846}, {0x01, 0x05, -1, 0x03, 0x0006, 0x0046}, {0x01, 0x05, -1, 0x03, 0x0006, 0x0066}, {0x01, 0x06, -1, 0x03, 0x0006, 0x0086}, {0x01, 0x07, -1, 0x03, 0x0006, 0x00c6}, {0x01, 0x08, -1, 0x03, 0x0006, 0x0146}, {0x01, 0x09, -1, 0x03, 0x0006, 0x0246}, {0x01, 0x0a, -1, 0x03, 0x0006, 0x0446}, {0x01, 0x18, -1, 0x03, 0x0006, 0x0846}, {0x01, 0x05, -1, 0x03, 0x0008, 0x0046}, {0x01, 0x05, -1, 0x03, 0x0008, 0x0066}, {0x01, 0x06, -1, 0x03, 0x0008, 0x0086}, {0x01, 0x07, -1, 0x03, 0x0008, 0x00c6}, {0x01, 0x08, -1, 0x03, 0x0008, 0x0146}, {0x01, 0x09, -1, 0x03, 0x0008, 0x0246}, {0x01, 0x0a, -1, 0x03, 0x0008, 0x0446}, {0x01, 0x18, -1, 0x03, 0x0008, 0x0846}, {0x06, 0x00, -1, 0x00, 0x0082, 0x0002}, {0x06, 0x00, -1, 0x01, 0x0082, 0x0003}, {0x06, 0x00, -1, 0x02, 0x0082, 0x0004}, {0x06, 0x00, -1, 0x03, 0x0082, 0x0005}, {0x06, 0x00, -1, 0x03, 0x0082, 0x0006}, {0x06, 0x00, -1, 0x03, 0x0082, 0x0007}, {0x06, 0x00, -1, 0x03, 0x0082, 0x0008}, {0x06, 0x00, -1, 0x03, 0x0082, 0x0009}, {0x07, 0x00, -1, 0x00, 0x00c2, 0x0002}, {0x07, 0x00, -1, 0x01, 0x00c2, 0x0003}, {0x07, 0x00, -1, 0x02, 0x00c2, 0x0004}, {0x07, 0x00, -1, 0x03, 0x00c2, 0x0005}, {0x07, 0x00, -1, 0x03, 0x00c2, 0x0006}, {0x07, 0x00, -1, 0x03, 0x00c2, 0x0007}, {0x07, 0x00, -1, 0x03, 0x00c2, 0x0008}, {0x07, 0x00, -1, 0x03, 0x00c2, 0x0009}, {0x08, 0x00, -1, 0x00, 0x0142, 0x0002}, {0x08, 0x00, -1, 0x01, 0x0142, 0x0003}, {0x08, 0x00, -1, 0x02, 0x0142, 0x0004}, {0x08, 0x00, -1, 0x03, 0x0142, 0x0005}, {0x08, 0x00, -1, 0x03, 0x0142, 0x0006}, {0x08, 0x00, -1, 0x03, 0x0142, 0x0007}, {0x08, 0x00, -1, 0x03, 0x0142, 0x0008}, {0x08, 0x00, -1, 0x03, 0x0142, 0x0009}, {0x09, 0x00, -1, 0x00, 0x0242, 0x0002}, {0x09, 0x00, -1, 0x01, 0x0242, 0x0003}, {0x09, 0x00, -1, 0x02, 0x0242, 0x0004}, {0x09, 0x00, -1, 0x03, 0x0242, 0x0005}, {0x09, 0x00, -1, 0x03, 0x0242, 0x0006}, {0x09, 0x00, -1, 0x03, 0x0242, 0x0007}, {0x09, 0x00, -1, 
0x03, 0x0242, 0x0008}, {0x09, 0x00, -1, 0x03, 0x0242, 0x0009}, {0x0a, 0x00, -1, 0x00, 0x0442, 0x0002}, {0x0a, 0x00, -1, 0x01, 0x0442, 0x0003}, {0x0a, 0x00, -1, 0x02, 0x0442, 0x0004}, {0x0a, 0x00, -1, 0x03, 0x0442, 0x0005}, {0x0a, 0x00, -1, 0x03, 0x0442, 0x0006}, {0x0a, 0x00, -1, 0x03, 0x0442, 0x0007}, {0x0a, 0x00, -1, 0x03, 0x0442, 0x0008}, {0x0a, 0x00, -1, 0x03, 0x0442, 0x0009}, {0x0c, 0x00, -1, 0x00, 0x0842, 0x0002}, {0x0c, 0x00, -1, 0x01, 0x0842, 0x0003}, {0x0c, 0x00, -1, 0x02, 0x0842, 0x0004}, {0x0c, 0x00, -1, 0x03, 0x0842, 0x0005}, {0x0c, 0x00, -1, 0x03, 0x0842, 0x0006}, {0x0c, 0x00, -1, 0x03, 0x0842, 0x0007}, {0x0c, 0x00, -1, 0x03, 0x0842, 0x0008}, {0x0c, 0x00, -1, 0x03, 0x0842, 0x0009}, {0x0e, 0x00, -1, 0x00, 0x1842, 0x0002}, {0x0e, 0x00, -1, 0x01, 0x1842, 0x0003}, {0x0e, 0x00, -1, 0x02, 0x1842, 0x0004}, {0x0e, 0x00, -1, 0x03, 0x1842, 0x0005}, {0x0e, 0x00, -1, 0x03, 0x1842, 0x0006}, {0x0e, 0x00, -1, 0x03, 0x1842, 0x0007}, {0x0e, 0x00, -1, 0x03, 0x1842, 0x0008}, {0x0e, 0x00, -1, 0x03, 0x1842, 0x0009}, {0x18, 0x00, -1, 0x00, 0x5842, 0x0002}, {0x18, 0x00, -1, 0x01, 0x5842, 0x0003}, {0x18, 0x00, -1, 0x02, 0x5842, 0x0004}, {0x18, 0x00, -1, 0x03, 0x5842, 0x0005}, {0x18, 0x00, -1, 0x03, 0x5842, 0x0006}, {0x18, 0x00, -1, 0x03, 0x5842, 0x0007}, {0x18, 0x00, -1, 0x03, 0x5842, 0x0008}, {0x18, 0x00, -1, 0x03, 0x5842, 0x0009}, {0x02, 0x05, -1, 0x03, 0x000a, 0x0046}, {0x02, 0x05, -1, 0x03, 0x000a, 0x0066}, {0x02, 0x06, -1, 0x03, 0x000a, 0x0086}, {0x02, 0x07, -1, 0x03, 0x000a, 0x00c6}, {0x02, 0x08, -1, 0x03, 0x000a, 0x0146}, {0x02, 0x09, -1, 0x03, 0x000a, 0x0246}, {0x02, 0x0a, -1, 0x03, 0x000a, 0x0446}, {0x02, 0x18, -1, 0x03, 0x000a, 0x0846}, {0x02, 0x05, -1, 0x03, 0x000e, 0x0046}, {0x02, 0x05, -1, 0x03, 0x000e, 0x0066}, {0x02, 0x06, -1, 0x03, 0x000e, 0x0086}, {0x02, 0x07, -1, 0x03, 0x000e, 0x00c6}, {0x02, 0x08, -1, 0x03, 0x000e, 0x0146}, {0x02, 0x09, -1, 0x03, 0x000e, 0x0246}, {0x02, 0x0a, -1, 0x03, 0x000e, 0x0446}, {0x02, 0x18, -1, 0x03, 0x000e, 0x0846}, {0x03, 0x05, -1, 0x03, 0x0012, 0x0046}, {0x03, 0x05, -1, 0x03, 0x0012, 0x0066}, {0x03, 0x06, -1, 0x03, 0x0012, 0x0086}, {0x03, 0x07, -1, 0x03, 0x0012, 0x00c6}, {0x03, 0x08, -1, 0x03, 0x0012, 0x0146}, {0x03, 0x09, -1, 0x03, 0x0012, 0x0246}, {0x03, 0x0a, -1, 0x03, 0x0012, 0x0446}, {0x03, 0x18, -1, 0x03, 0x0012, 0x0846}, {0x03, 0x05, -1, 0x03, 0x001a, 0x0046}, {0x03, 0x05, -1, 0x03, 0x001a, 0x0066}, {0x03, 0x06, -1, 0x03, 0x001a, 0x0086}, {0x03, 0x07, -1, 0x03, 0x001a, 0x00c6}, {0x03, 0x08, -1, 0x03, 0x001a, 0x0146}, {0x03, 0x09, -1, 0x03, 0x001a, 0x0246}, {0x03, 0x0a, -1, 0x03, 0x001a, 0x0446}, {0x03, 0x18, -1, 0x03, 0x001a, 0x0846}, {0x04, 0x05, -1, 0x03, 0x0022, 0x0046}, {0x04, 0x05, -1, 0x03, 0x0022, 0x0066}, {0x04, 0x06, -1, 0x03, 0x0022, 0x0086}, {0x04, 0x07, -1, 0x03, 0x0022, 0x00c6}, {0x04, 0x08, -1, 0x03, 0x0022, 0x0146}, {0x04, 0x09, -1, 0x03, 0x0022, 0x0246}, {0x04, 0x0a, -1, 0x03, 0x0022, 0x0446}, {0x04, 0x18, -1, 0x03, 0x0022, 0x0846}, {0x04, 0x05, -1, 0x03, 0x0032, 0x0046}, {0x04, 0x05, -1, 0x03, 0x0032, 0x0066}, {0x04, 0x06, -1, 0x03, 0x0032, 0x0086}, {0x04, 0x07, -1, 0x03, 0x0032, 0x00c6}, {0x04, 0x08, -1, 0x03, 0x0032, 0x0146}, {0x04, 0x09, -1, 0x03, 0x0032, 0x0246}, {0x04, 0x0a, -1, 0x03, 0x0032, 0x0446}, {0x04, 0x18, -1, 0x03, 0x0032, 0x0846}, {0x05, 0x05, -1, 0x03, 0x0042, 0x0046}, {0x05, 0x05, -1, 0x03, 0x0042, 0x0066}, {0x05, 0x06, -1, 0x03, 0x0042, 0x0086}, {0x05, 0x07, -1, 0x03, 0x0042, 0x00c6}, {0x05, 0x08, -1, 0x03, 0x0042, 0x0146}, {0x05, 0x09, -1, 0x03, 0x0042, 0x0246}, {0x05, 0x0a, -1, 0x03, 0x0042, 0x0446}, {0x05, 
0x18, -1, 0x03, 0x0042, 0x0846}, {0x05, 0x05, -1, 0x03, 0x0062, 0x0046}, {0x05, 0x05, -1, 0x03, 0x0062, 0x0066}, {0x05, 0x06, -1, 0x03, 0x0062, 0x0086}, {0x05, 0x07, -1, 0x03, 0x0062, 0x00c6}, {0x05, 0x08, -1, 0x03, 0x0062, 0x0146}, {0x05, 0x09, -1, 0x03, 0x0062, 0x0246}, {0x05, 0x0a, -1, 0x03, 0x0062, 0x0446}, {0x05, 0x18, -1, 0x03, 0x0062, 0x0846}, {0x06, 0x01, -1, 0x03, 0x0082, 0x000a}, {0x06, 0x01, -1, 0x03, 0x0082, 0x000c}, {0x06, 0x02, -1, 0x03, 0x0082, 0x000e}, {0x06, 0x02, -1, 0x03, 0x0082, 0x0012}, {0x06, 0x03, -1, 0x03, 0x0082, 0x0016}, {0x06, 0x03, -1, 0x03, 0x0082, 0x001e}, {0x06, 0x04, -1, 0x03, 0x0082, 0x0026}, {0x06, 0x04, -1, 0x03, 0x0082, 0x0036}, {0x07, 0x01, -1, 0x03, 0x00c2, 0x000a}, {0x07, 0x01, -1, 0x03, 0x00c2, 0x000c}, {0x07, 0x02, -1, 0x03, 0x00c2, 0x000e}, {0x07, 0x02, -1, 0x03, 0x00c2, 0x0012}, {0x07, 0x03, -1, 0x03, 0x00c2, 0x0016}, {0x07, 0x03, -1, 0x03, 0x00c2, 0x001e}, {0x07, 0x04, -1, 0x03, 0x00c2, 0x0026}, {0x07, 0x04, -1, 0x03, 0x00c2, 0x0036}, {0x08, 0x01, -1, 0x03, 0x0142, 0x000a}, {0x08, 0x01, -1, 0x03, 0x0142, 0x000c}, {0x08, 0x02, -1, 0x03, 0x0142, 0x000e}, {0x08, 0x02, -1, 0x03, 0x0142, 0x0012}, {0x08, 0x03, -1, 0x03, 0x0142, 0x0016}, {0x08, 0x03, -1, 0x03, 0x0142, 0x001e}, {0x08, 0x04, -1, 0x03, 0x0142, 0x0026}, {0x08, 0x04, -1, 0x03, 0x0142, 0x0036}, {0x09, 0x01, -1, 0x03, 0x0242, 0x000a}, {0x09, 0x01, -1, 0x03, 0x0242, 0x000c}, {0x09, 0x02, -1, 0x03, 0x0242, 0x000e}, {0x09, 0x02, -1, 0x03, 0x0242, 0x0012}, {0x09, 0x03, -1, 0x03, 0x0242, 0x0016}, {0x09, 0x03, -1, 0x03, 0x0242, 0x001e}, {0x09, 0x04, -1, 0x03, 0x0242, 0x0026}, {0x09, 0x04, -1, 0x03, 0x0242, 0x0036}, {0x0a, 0x01, -1, 0x03, 0x0442, 0x000a}, {0x0a, 0x01, -1, 0x03, 0x0442, 0x000c}, {0x0a, 0x02, -1, 0x03, 0x0442, 0x000e}, {0x0a, 0x02, -1, 0x03, 0x0442, 0x0012}, {0x0a, 0x03, -1, 0x03, 0x0442, 0x0016}, {0x0a, 0x03, -1, 0x03, 0x0442, 0x001e}, {0x0a, 0x04, -1, 0x03, 0x0442, 0x0026}, {0x0a, 0x04, -1, 0x03, 0x0442, 0x0036}, {0x0c, 0x01, -1, 0x03, 0x0842, 0x000a}, {0x0c, 0x01, -1, 0x03, 0x0842, 0x000c}, {0x0c, 0x02, -1, 0x03, 0x0842, 0x000e}, {0x0c, 0x02, -1, 0x03, 0x0842, 0x0012}, {0x0c, 0x03, -1, 0x03, 0x0842, 0x0016}, {0x0c, 0x03, -1, 0x03, 0x0842, 0x001e}, {0x0c, 0x04, -1, 0x03, 0x0842, 0x0026}, {0x0c, 0x04, -1, 0x03, 0x0842, 0x0036}, {0x0e, 0x01, -1, 0x03, 0x1842, 0x000a}, {0x0e, 0x01, -1, 0x03, 0x1842, 0x000c}, {0x0e, 0x02, -1, 0x03, 0x1842, 0x000e}, {0x0e, 0x02, -1, 0x03, 0x1842, 0x0012}, {0x0e, 0x03, -1, 0x03, 0x1842, 0x0016}, {0x0e, 0x03, -1, 0x03, 0x1842, 0x001e}, {0x0e, 0x04, -1, 0x03, 0x1842, 0x0026}, {0x0e, 0x04, -1, 0x03, 0x1842, 0x0036}, {0x18, 0x01, -1, 0x03, 0x5842, 0x000a}, {0x18, 0x01, -1, 0x03, 0x5842, 0x000c}, {0x18, 0x02, -1, 0x03, 0x5842, 0x000e}, {0x18, 0x02, -1, 0x03, 0x5842, 0x0012}, {0x18, 0x03, -1, 0x03, 0x5842, 0x0016}, {0x18, 0x03, -1, 0x03, 0x5842, 0x001e}, {0x18, 0x04, -1, 0x03, 0x5842, 0x0026}, {0x18, 0x04, -1, 0x03, 0x5842, 0x0036}, {0x06, 0x05, -1, 0x03, 0x0082, 0x0046}, {0x06, 0x05, -1, 0x03, 0x0082, 0x0066}, {0x06, 0x06, -1, 0x03, 0x0082, 0x0086}, {0x06, 0x07, -1, 0x03, 0x0082, 0x00c6}, {0x06, 0x08, -1, 0x03, 0x0082, 0x0146}, {0x06, 0x09, -1, 0x03, 0x0082, 0x0246}, {0x06, 0x0a, -1, 0x03, 0x0082, 0x0446}, {0x06, 0x18, -1, 0x03, 0x0082, 0x0846}, {0x07, 0x05, -1, 0x03, 0x00c2, 0x0046}, {0x07, 0x05, -1, 0x03, 0x00c2, 0x0066}, {0x07, 0x06, -1, 0x03, 0x00c2, 0x0086}, {0x07, 0x07, -1, 0x03, 0x00c2, 0x00c6}, {0x07, 0x08, -1, 0x03, 0x00c2, 0x0146}, {0x07, 0x09, -1, 0x03, 0x00c2, 0x0246}, {0x07, 0x0a, -1, 0x03, 0x00c2, 0x0446}, {0x07, 0x18, -1, 0x03, 0x00c2, 0x0846}, 
{0x08, 0x05, -1, 0x03, 0x0142, 0x0046}, {0x08, 0x05, -1, 0x03, 0x0142, 0x0066}, {0x08, 0x06, -1, 0x03, 0x0142, 0x0086}, {0x08, 0x07, -1, 0x03, 0x0142, 0x00c6}, {0x08, 0x08, -1, 0x03, 0x0142, 0x0146}, {0x08, 0x09, -1, 0x03, 0x0142, 0x0246}, {0x08, 0x0a, -1, 0x03, 0x0142, 0x0446}, {0x08, 0x18, -1, 0x03, 0x0142, 0x0846}, {0x09, 0x05, -1, 0x03, 0x0242, 0x0046}, {0x09, 0x05, -1, 0x03, 0x0242, 0x0066}, {0x09, 0x06, -1, 0x03, 0x0242, 0x0086}, {0x09, 0x07, -1, 0x03, 0x0242, 0x00c6}, {0x09, 0x08, -1, 0x03, 0x0242, 0x0146}, {0x09, 0x09, -1, 0x03, 0x0242, 0x0246}, {0x09, 0x0a, -1, 0x03, 0x0242, 0x0446}, {0x09, 0x18, -1, 0x03, 0x0242, 0x0846}, {0x0a, 0x05, -1, 0x03, 0x0442, 0x0046}, {0x0a, 0x05, -1, 0x03, 0x0442, 0x0066}, {0x0a, 0x06, -1, 0x03, 0x0442, 0x0086}, {0x0a, 0x07, -1, 0x03, 0x0442, 0x00c6}, {0x0a, 0x08, -1, 0x03, 0x0442, 0x0146}, {0x0a, 0x09, -1, 0x03, 0x0442, 0x0246}, {0x0a, 0x0a, -1, 0x03, 0x0442, 0x0446}, {0x0a, 0x18, -1, 0x03, 0x0442, 0x0846}, {0x0c, 0x05, -1, 0x03, 0x0842, 0x0046}, {0x0c, 0x05, -1, 0x03, 0x0842, 0x0066}, {0x0c, 0x06, -1, 0x03, 0x0842, 0x0086}, {0x0c, 0x07, -1, 0x03, 0x0842, 0x00c6}, {0x0c, 0x08, -1, 0x03, 0x0842, 0x0146}, {0x0c, 0x09, -1, 0x03, 0x0842, 0x0246}, {0x0c, 0x0a, -1, 0x03, 0x0842, 0x0446}, {0x0c, 0x18, -1, 0x03, 0x0842, 0x0846}, {0x0e, 0x05, -1, 0x03, 0x1842, 0x0046}, {0x0e, 0x05, -1, 0x03, 0x1842, 0x0066}, {0x0e, 0x06, -1, 0x03, 0x1842, 0x0086}, {0x0e, 0x07, -1, 0x03, 0x1842, 0x00c6}, {0x0e, 0x08, -1, 0x03, 0x1842, 0x0146}, {0x0e, 0x09, -1, 0x03, 0x1842, 0x0246}, {0x0e, 0x0a, -1, 0x03, 0x1842, 0x0446}, {0x0e, 0x18, -1, 0x03, 0x1842, 0x0846}, {0x18, 0x05, -1, 0x03, 0x5842, 0x0046}, {0x18, 0x05, -1, 0x03, 0x5842, 0x0066}, {0x18, 0x06, -1, 0x03, 0x5842, 0x0086}, {0x18, 0x07, -1, 0x03, 0x5842, 0x00c6}, {0x18, 0x08, -1, 0x03, 0x5842, 0x0146}, {0x18, 0x09, -1, 0x03, 0x5842, 0x0246}, {0x18, 0x0a, -1, 0x03, 0x5842, 0x0446}, {0x18, 0x18, -1, 0x03, 0x5842, 0x0846}}; CONSTANT uint8_t kCodeLengthCodeOrder[18] = { 1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15}; CONSTANT uint8_t kCodeLengthPrefixLength[16] = {2, 2, 2, 3, 2, 2, 2, 4, 2, 2, 2, 3, 2, 2, 2, 4}; CONSTANT uint8_t kCodeLengthPrefixValue[16] = {0, 4, 3, 2, 0, 4, 3, 1, 0, 4, 3, 2, 0, 4, 3, 5}; // Represents the range of values belonging to a prefix code: [offset, offset + 2^nbits) CONSTANT uint16_t kBlockLengthPrefixCodeOffset[brotli_num_block_len_symbols] = { 1, 5, 9, 13, 17, 25, 33, 41, 49, 65, 81, 97, 113, 145, 177, 209, 241, 305, 369, 497, 753, 1265, 2289, 4337, 8433, 16625}; CONSTANT uint8_t kBlockLengthPrefixCodeBits[brotli_num_block_len_symbols] = { 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 7, 8, 9, 10, 11, 12, 13, 24}; // Maximum possible Huffman table size for an alphabet size of (index * 32), // max code length 15 and root table bits 8. 
CONSTANT uint16_t kMaxHuffmanTableSize[] = { 256, 402, 436, 468, 500, 534, 566, 598, 630, 662, 694, 726, 758, 790, 822, 854, 886, 920, 952, 984, 1016, 1048, 1080, 1112, 1144, 1176, 1208, 1240, 1272, 1304, 1336, 1368, 1400, 1432, 1464, 1496, 1528}; constexpr int brotli_huffman_max_size_26 = 396; constexpr int brotli_huffman_max_size_258 = 632; // Max table size for context map constexpr int brotli_huffman_max_size_272 = 646; enum brotli_transform_type_e { BROTLI_TRANSFORM_IDENTITY = 0, BROTLI_TRANSFORM_OMIT_LAST_1 = 1, BROTLI_TRANSFORM_OMIT_LAST_2 = 2, BROTLI_TRANSFORM_OMIT_LAST_3 = 3, BROTLI_TRANSFORM_OMIT_LAST_4 = 4, BROTLI_TRANSFORM_OMIT_LAST_5 = 5, BROTLI_TRANSFORM_OMIT_LAST_6 = 6, BROTLI_TRANSFORM_OMIT_LAST_7 = 7, BROTLI_TRANSFORM_OMIT_LAST_8 = 8, BROTLI_TRANSFORM_OMIT_LAST_9 = 9, BROTLI_TRANSFORM_UPPERCASE_FIRST = 10, BROTLI_TRANSFORM_UPPERCASE_ALL = 11, BROTLI_TRANSFORM_OMIT_FIRST_1 = 12, BROTLI_TRANSFORM_OMIT_FIRST_2 = 13, BROTLI_TRANSFORM_OMIT_FIRST_3 = 14, BROTLI_TRANSFORM_OMIT_FIRST_4 = 15, BROTLI_TRANSFORM_OMIT_FIRST_5 = 16, BROTLI_TRANSFORM_OMIT_FIRST_6 = 17, BROTLI_TRANSFORM_OMIT_FIRST_7 = 18, BROTLI_TRANSFORM_OMIT_FIRST_8 = 19, BROTLI_TRANSFORM_OMIT_FIRST_9 = 20, BROTLI_NUM_TRANSFORM_TYPES // Counts transforms, not a transform itself. }; /* RFC 7932 transforms string data */ CONSTANT uint8_t kPrefixSuffix[217] = { 0x01, 0x20, 0x02, 0x2C, 0x20, 0x08, 0x20, 0x6F, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x04, 0x20, 0x6F, 0x66, 0x20, 0x02, 0x73, 0x20, 0x01, 0x2E, 0x05, 0x20, 0x61, 0x6E, 0x64, 0x20, 0x04, 0x20, 0x69, 0x6E, 0x20, 0x01, 0x22, 0x04, 0x20, 0x74, 0x6F, 0x20, 0x02, 0x22, 0x3E, 0x01, 0x0A, 0x02, 0x2E, 0x20, 0x01, 0x5D, 0x05, 0x20, 0x66, 0x6F, 0x72, 0x20, 0x03, 0x20, 0x61, 0x20, 0x06, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x01, 0x27, 0x06, 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x06, 0x20, 0x66, 0x72, 0x6F, 0x6D, 0x20, 0x04, 0x20, 0x62, 0x79, 0x20, 0x01, 0x28, 0x06, 0x2E, 0x20, 0x54, 0x68, 0x65, 0x20, 0x04, 0x20, 0x6F, 0x6E, 0x20, 0x04, 0x20, 0x61, 0x73, 0x20, 0x04, 0x20, 0x69, 0x73, 0x20, 0x04, 0x69, 0x6E, 0x67, 0x20, 0x02, 0x0A, 0x09, 0x01, 0x3A, 0x03, 0x65, 0x64, 0x20, 0x02, 0x3D, 0x22, 0x04, 0x20, 0x61, 0x74, 0x20, 0x03, 0x6C, 0x79, 0x20, 0x01, 0x2C, 0x02, 0x3D, 0x27, 0x05, 0x2E, 0x63, 0x6F, 0x6D, 0x2F, 0x07, 0x2E, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x05, 0x20, 0x6E, 0x6F, 0x74, 0x20, 0x03, 0x65, 0x72, 0x20, 0x03, 0x61, 0x6C, 0x20, 0x04, 0x66, 0x75, 0x6C, 0x20, 0x04, 0x69, 0x76, 0x65, 0x20, 0x05, 0x6C, 0x65, 0x73, 0x73, 0x20, 0x04, 0x65, 0x73, 0x74, 0x20, 0x04, 0x69, 0x7A, 0x65, 0x20, 0x02, 0xC2, 0xA0, 0x04, 0x6F, 0x75, 0x73, 0x20, 0x05, 0x20, 0x74, 0x68, 0x65, 0x20, 0x02, 0x65, 0x20, 0x00}; CONSTANT uint16_t kPrefixSuffixMap[50] = { 0x00, 0x02, 0x05, 0x0E, 0x13, 0x16, 0x18, 0x1E, 0x23, 0x25, 0x2A, 0x2D, 0x2F, 0x32, 0x34, 0x3A, 0x3E, 0x45, 0x47, 0x4E, 0x55, 0x5A, 0x5C, 0x63, 0x68, 0x6D, 0x72, 0x77, 0x7A, 0x7C, 0x80, 0x83, 0x88, 0x8C, 0x8E, 0x91, 0x97, 0x9F, 0xA5, 0xA9, 0xAD, 0xB2, 0xB7, 0xBD, 0xC2, 0xC7, 0xCA, 0xCF, 0xD5, 0xD8}; /* RFC 7932 transforms */ /* Each entry is a [prefix_id, transform, suffix_id] triplet. 
*/ CONSTANT uint8_t kTransformsData[] = { 49, BROTLI_TRANSFORM_IDENTITY, 49, 49, BROTLI_TRANSFORM_IDENTITY, 0, 0, BROTLI_TRANSFORM_IDENTITY, 0, 49, BROTLI_TRANSFORM_OMIT_FIRST_1, 49, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 0, 49, BROTLI_TRANSFORM_IDENTITY, 47, 0, BROTLI_TRANSFORM_IDENTITY, 49, 4, BROTLI_TRANSFORM_IDENTITY, 0, 49, BROTLI_TRANSFORM_IDENTITY, 3, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 49, 49, BROTLI_TRANSFORM_IDENTITY, 6, 49, BROTLI_TRANSFORM_OMIT_FIRST_2, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_1, 49, 1, BROTLI_TRANSFORM_IDENTITY, 0, 49, BROTLI_TRANSFORM_IDENTITY, 1, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 0, 49, BROTLI_TRANSFORM_IDENTITY, 7, 49, BROTLI_TRANSFORM_IDENTITY, 9, 48, BROTLI_TRANSFORM_IDENTITY, 0, 49, BROTLI_TRANSFORM_IDENTITY, 8, 49, BROTLI_TRANSFORM_IDENTITY, 5, 49, BROTLI_TRANSFORM_IDENTITY, 10, 49, BROTLI_TRANSFORM_IDENTITY, 11, 49, BROTLI_TRANSFORM_OMIT_LAST_3, 49, 49, BROTLI_TRANSFORM_IDENTITY, 13, 49, BROTLI_TRANSFORM_IDENTITY, 14, 49, BROTLI_TRANSFORM_OMIT_FIRST_3, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_2, 49, 49, BROTLI_TRANSFORM_IDENTITY, 15, 49, BROTLI_TRANSFORM_IDENTITY, 16, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 49, 49, BROTLI_TRANSFORM_IDENTITY, 12, 5, BROTLI_TRANSFORM_IDENTITY, 49, 0, BROTLI_TRANSFORM_IDENTITY, 1, 49, BROTLI_TRANSFORM_OMIT_FIRST_4, 49, 49, BROTLI_TRANSFORM_IDENTITY, 18, 49, BROTLI_TRANSFORM_IDENTITY, 17, 49, BROTLI_TRANSFORM_IDENTITY, 19, 49, BROTLI_TRANSFORM_IDENTITY, 20, 49, BROTLI_TRANSFORM_OMIT_FIRST_5, 49, 49, BROTLI_TRANSFORM_OMIT_FIRST_6, 49, 47, BROTLI_TRANSFORM_IDENTITY, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_4, 49, 49, BROTLI_TRANSFORM_IDENTITY, 22, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 49, 49, BROTLI_TRANSFORM_IDENTITY, 23, 49, BROTLI_TRANSFORM_IDENTITY, 24, 49, BROTLI_TRANSFORM_IDENTITY, 25, 49, BROTLI_TRANSFORM_OMIT_LAST_7, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_1, 26, 49, BROTLI_TRANSFORM_IDENTITY, 27, 49, BROTLI_TRANSFORM_IDENTITY, 28, 0, BROTLI_TRANSFORM_IDENTITY, 12, 49, BROTLI_TRANSFORM_IDENTITY, 29, 49, BROTLI_TRANSFORM_OMIT_FIRST_9, 49, 49, BROTLI_TRANSFORM_OMIT_FIRST_7, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_6, 49, 49, BROTLI_TRANSFORM_IDENTITY, 21, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 1, 49, BROTLI_TRANSFORM_OMIT_LAST_8, 49, 49, BROTLI_TRANSFORM_IDENTITY, 31, 49, BROTLI_TRANSFORM_IDENTITY, 32, 47, BROTLI_TRANSFORM_IDENTITY, 3, 49, BROTLI_TRANSFORM_OMIT_LAST_5, 49, 49, BROTLI_TRANSFORM_OMIT_LAST_9, 49, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 1, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 8, 5, BROTLI_TRANSFORM_IDENTITY, 21, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 0, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 10, 49, BROTLI_TRANSFORM_IDENTITY, 30, 0, BROTLI_TRANSFORM_IDENTITY, 5, 35, BROTLI_TRANSFORM_IDENTITY, 49, 47, BROTLI_TRANSFORM_IDENTITY, 2, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 17, 49, BROTLI_TRANSFORM_IDENTITY, 36, 49, BROTLI_TRANSFORM_IDENTITY, 33, 5, BROTLI_TRANSFORM_IDENTITY, 0, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 21, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 5, 49, BROTLI_TRANSFORM_IDENTITY, 37, 0, BROTLI_TRANSFORM_IDENTITY, 30, 49, BROTLI_TRANSFORM_IDENTITY, 38, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 0, 49, BROTLI_TRANSFORM_IDENTITY, 39, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 49, 49, BROTLI_TRANSFORM_IDENTITY, 34, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 8, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 12, 0, BROTLI_TRANSFORM_IDENTITY, 21, 49, BROTLI_TRANSFORM_IDENTITY, 40, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 12, 49, BROTLI_TRANSFORM_IDENTITY, 41, 49, BROTLI_TRANSFORM_IDENTITY, 42, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 17, 49, BROTLI_TRANSFORM_IDENTITY, 43, 0, 
BROTLI_TRANSFORM_UPPERCASE_FIRST, 5, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 10, 0, BROTLI_TRANSFORM_IDENTITY, 34, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 33, 49, BROTLI_TRANSFORM_IDENTITY, 44, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 5, 45, BROTLI_TRANSFORM_IDENTITY, 49, 0, BROTLI_TRANSFORM_IDENTITY, 33, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 30, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 30, 49, BROTLI_TRANSFORM_IDENTITY, 46, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 1, 49, BROTLI_TRANSFORM_UPPERCASE_FIRST, 34, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 33, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 30, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 1, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 33, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 21, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 12, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 5, 49, BROTLI_TRANSFORM_UPPERCASE_ALL, 34, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 12, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 30, 0, BROTLI_TRANSFORM_UPPERCASE_ALL, 34, 0, BROTLI_TRANSFORM_UPPERCASE_FIRST, 34, }; CONSTANT int kNumTransforms = (int)(sizeof(kTransformsData) / (3 * sizeof(kTransformsData[0])));
0
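A note on how the two block-length tables above combine: a decoded prefix code selects a base value from kBlockLengthPrefixCodeOffset and a count of extra bits from kBlockLengthPrefixCodeBits, and the extra bits read from the stream are added to the base, covering exactly the range [offset, offset + 2^nbits) stated in the comment. A minimal standalone sketch follows; the bit_reader here is a hypothetical stand-in for the decoder's real bitstream state, and the tables are copied verbatim from above.

#include <cstdint>

// Copies of the tables above (RFC 7932, block length codes 0..25)
static uint16_t const kOffset[26] = {1,   5,   9,    13,   17,   25,   33,   41,  49,
                                     65,  81,  97,   113,  145,  177,  209,  241, 305,
                                     369, 497, 753,  1265, 2289, 4337, 8433, 16625};
static uint8_t const kBits[26]    = {2, 2, 2, 2, 3, 3, 3, 3, 4,  4,  4,  4,  5,
                                     5, 5, 5, 6, 6, 7, 8, 9, 10, 11, 12, 13, 24};

struct bit_reader {
  // Hypothetical: returns the next `nbits` bits from the compressed stream
  uint32_t read(uint32_t nbits);
};

uint32_t decode_block_length(bit_reader& br, uint32_t prefix_code)  // code in 0..25
{
  // Base offset plus the extra bits for this prefix code
  return kOffset[prefix_code] + br.read(kBits[prefix_code]);
}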
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/nvcomp_adapter.cuh
/*
 * Copyright (c) 2022-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include "gpuinflate.hpp"

#include <cudf/utilities/span.hpp>

#include <nvcomp.h>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/device_uvector.hpp>

#include <optional>

namespace cudf::io::nvcomp {

struct batched_args {
  rmm::device_uvector<void const*> input_data_ptrs;
  rmm::device_uvector<size_t> input_data_sizes;
  rmm::device_uvector<void*> output_data_ptrs;
  rmm::device_uvector<size_t> output_data_sizes;
};

/**
 * @brief Split lists of src/dst device spans into lists of pointers/sizes.
 *
 * @param[in] inputs List of input buffers
 * @param[in] outputs List of output buffers
 * @param[in] stream CUDA stream to use
 */
batched_args create_batched_nvcomp_args(device_span<device_span<uint8_t const> const> inputs,
                                        device_span<device_span<uint8_t> const> outputs,
                                        rmm::cuda_stream_view stream);

/**
 * @brief Convert nvcomp statuses and output sizes into cuIO compression results.
 */
void update_compression_results(device_span<nvcompStatus_t const> nvcomp_stats,
                                device_span<size_t const> actual_output_sizes,
                                device_span<compression_result> results,
                                rmm::cuda_stream_view stream);

/**
 * @brief Fill the result array based on the actual output sizes.
 */
void update_compression_results(device_span<size_t const> actual_output_sizes,
                                device_span<compression_result> results,
                                rmm::cuda_stream_view stream);

/**
 * @brief Mark unsupported input chunks for skipping.
 */
void skip_unsupported_inputs(device_span<size_t> input_sizes,
                             device_span<compression_result> results,
                             std::optional<size_t> max_valid_input_size,
                             rmm::cuda_stream_view stream);

/**
 * @brief Returns the size of the largest input chunk and the total input size.
 */
std::pair<size_t, size_t> max_chunk_and_total_input_size(device_span<size_t const> input_sizes,
                                                         rmm::cuda_stream_view stream);

}  // namespace cudf::io::nvcomp
0
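For reference, what create_batched_nvcomp_args declared above produces is just the structure-of-arrays form the nvcomp batched C API expects. Below is a hedged host-side illustration of that splitting; the real implementation fills rmm::device_uvectors on the given CUDA stream, and the span type and names here are simplified stand-ins, not cudf types.

#include <cstddef>
#include <cstdint>
#include <vector>

// Simplified host-only stand-ins for device_span and batched_args
template <typename T>
struct span {
  T* ptr;
  std::size_t len;
};

struct host_batched_args {
  std::vector<void const*> input_data_ptrs;
  std::vector<std::size_t> input_data_sizes;
  std::vector<void*> output_data_ptrs;
  std::vector<std::size_t> output_data_sizes;
};

host_batched_args make_args(std::vector<span<std::uint8_t const>> const& inputs,
                            std::vector<span<std::uint8_t>> const& outputs)
{
  host_batched_args args;
  for (auto const& in : inputs) {  // split each input span into raw pointer + size
    args.input_data_ptrs.push_back(in.ptr);
    args.input_data_sizes.push_back(in.len);
  }
  for (auto const& out : outputs) {  // same for the output buffers
    args.output_data_ptrs.push_back(out.ptr);
    args.output_data_sizes.push_back(out.len);
  }
  return args;
}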
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/brotli_dict.hpp
/*
 * Copyright (c) 2018-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Portions of this file are derived from Google's Brotli project at
 * https://github.com/google/brotli, original license text below.
 */

/* Copyright 2013 Google Inc. All Rights Reserved.
   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* Copyright (c) 2009, 2010, 2013 - 2016 by the Brotli Authors.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#pragma once

#include <cstdint>

namespace cudf {
namespace io {

struct brotli_dictionary_s {
  /**
   * Number of bits to encode index of dictionary word in a bucket.
   *
   * Specification: Appendix A. Static Dictionary Data
   *
   * Words in a dictionary are bucketed by length.
   * @c 0 means that there are no words of a given length.
   * Dictionary consists of words with length of [4..24] bytes.
   * Values at [0..3] and [25..31] indices should not be addressed.
   */
  uint8_t size_bits_by_length[32];

  /* assert(offset[i + 1] == offset[i] + (bits[i] ? (i << bits[i]) : 0)) */
  uint32_t offsets_by_length[32];

  /* The data array must conform to the size_bits_by_length values.
     The specified size matches the default (RFC 7932) dictionary.
     Its size is also equal to offsets_by_length[31] */
  uint8_t data[122784];
};

constexpr int brotli_min_dictionary_word_length = 4;
constexpr int brotli_max_dictionary_word_length = 24;

brotli_dictionary_s const* get_brotli_dictionary();

}  // namespace io
}  // namespace cudf
0
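Given the invariant in the offsets_by_length comment above (bucket i holds 2^size_bits_by_length[i] words of exactly i bytes each), the byte address of a dictionary word follows directly. A minimal sketch of that lookup, using the brotli_dictionary_s layout from the header above and assuming the caller has already validated word_id against the bucket's size bits:

#include <cstdint>

inline std::uint8_t const* dictionary_word(brotli_dictionary_s const& dict,
                                           int len,  // word length, 4..24 bytes
                                           std::uint32_t word_id)
{
  // Bucket `len` starts at offsets_by_length[len] and stores its words back
  // to back, each exactly `len` bytes long.
  // Precondition: word_id < (1u << dict.size_bits_by_length[len])
  return dict.data + dict.offsets_by_length[len] +
         static_cast<std::uint32_t>(len) * word_id;
}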
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/statistics.cu
/*
 * Copyright (c) 2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gpuinflate.hpp"

#include <rmm/exec_policy.hpp>

#include <thrust/transform_reduce.h>

namespace cudf::io {

writer_compression_statistics collect_compression_statistics(
  device_span<device_span<uint8_t const> const> inputs,
  device_span<compression_result const> results,
  rmm::cuda_stream_view stream)
{
  // total bytes_written across successfully compressed chunks
  auto const output_size_successful = thrust::transform_reduce(
    rmm::exec_policy(stream),
    results.begin(),
    results.end(),
    [] __device__(auto& res) {
      return res.status == compression_status::SUCCESS ? res.bytes_written : 0;
    },
    0ul,
    thrust::plus<size_t>());

  // sum of input sizes whose result carries the given status
  auto input_size_with_status = [inputs, results, stream](compression_status status) {
    auto const zipped_begin =
      thrust::make_zip_iterator(thrust::make_tuple(inputs.begin(), results.begin()));
    auto const zipped_end = zipped_begin + inputs.size();

    return thrust::transform_reduce(
      rmm::exec_policy(stream),
      zipped_begin,
      zipped_end,
      [status] __device__(auto tup) {
        return thrust::get<1>(tup).status == status ? thrust::get<0>(tup).size() : 0;
      },
      0ul,
      thrust::plus<size_t>());
  };

  return writer_compression_statistics{input_size_with_status(compression_status::SUCCESS),
                                       input_size_with_status(compression_status::FAILURE),
                                       input_size_with_status(compression_status::SKIPPED),
                                       output_size_successful};
}

}  // namespace cudf::io
0
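The transform_reduce calls above compute four sums in separate device passes. For readers less familiar with Thrust, the same bookkeeping in plain C++ looks like the sketch below; the types and field names here are hypothetical simplifications, not the cudf structs.

#include <cstddef>
#include <vector>

enum class status { SUCCESS, FAILURE, SKIPPED };

struct chunk {  // one (input, result) pair, as in the zipped iterator above
  std::size_t input_size;
  std::size_t bytes_written;
  status st;
};

struct stats {
  std::size_t in_ok = 0, in_fail = 0, in_skip = 0, out_ok = 0;
};

stats collect(std::vector<chunk> const& chunks)
{
  stats s;
  for (auto const& c : chunks) {
    switch (c.st) {
      case status::SUCCESS:
        s.in_ok += c.input_size;       // input bytes that compressed
        s.out_ok += c.bytes_written;   // and what they compressed down to
        break;
      case status::FAILURE: s.in_fail += c.input_size; break;
      case status::SKIPPED: s.in_skip += c.input_size; break;
    }
  }
  return s;
}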
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/nvcomp_adapter.cpp
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nvcomp_adapter.hpp" #include "nvcomp_adapter.cuh" #include <cudf/utilities/error.hpp> #include <io/utilities/config_utils.hpp> #include <nvcomp/snappy.h> #include <mutex> #define NVCOMP_DEFLATE_HEADER <nvcomp/deflate.h> #if __has_include(NVCOMP_DEFLATE_HEADER) #include NVCOMP_DEFLATE_HEADER #endif #define NVCOMP_ZSTD_HEADER <nvcomp/zstd.h> #if __has_include(NVCOMP_ZSTD_HEADER) #include NVCOMP_ZSTD_HEADER #endif #define NVCOMP_HAS_ZSTD_DECOMP(MAJOR, MINOR, PATCH) (MAJOR > 2 or (MAJOR == 2 and MINOR >= 3)) #define NVCOMP_HAS_ZSTD_COMP(MAJOR, MINOR, PATCH) (MAJOR > 2 or (MAJOR == 2 and MINOR >= 4)) #define NVCOMP_HAS_DEFLATE(MAJOR, MINOR, PATCH) (MAJOR > 2 or (MAJOR == 2 and MINOR >= 5)) #define NVCOMP_HAS_DECOMP_TEMPSIZE_EX(MAJOR, MINOR, PATCH) \ (MAJOR > 2 or (MAJOR == 2 and MINOR > 3) or (MAJOR == 2 and MINOR == 3 and PATCH >= 1)) #define NVCOMP_HAS_COMP_TEMPSIZE_EX(MAJOR, MINOR, PATCH) (MAJOR > 2 or (MAJOR == 2 and MINOR >= 6)) // ZSTD is stable for nvcomp 2.3.2 or newer #define NVCOMP_ZSTD_DECOMP_IS_STABLE(MAJOR, MINOR, PATCH) \ (MAJOR > 2 or (MAJOR == 2 and MINOR > 3) or (MAJOR == 2 and MINOR == 3 and PATCH >= 2)) // Issue https://github.com/NVIDIA/spark-rapids/issues/6614 impacts nvCOMP 2.4.0 ZSTD decompression // on compute 6.x #define NVCOMP_ZSTD_IS_DISABLED_ON_PASCAL(MAJOR, MINOR, PATCH) \ (MAJOR == 2 and MINOR == 4 and PATCH == 0) namespace cudf::io::nvcomp { // Dispatcher for nvcompBatched<format>DecompressGetTempSizeEx template <typename... Args> std::optional<nvcompStatus_t> batched_decompress_get_temp_size_ex(compression_type compression, Args&&... args) { #if NVCOMP_HAS_DECOMP_TEMPSIZE_EX(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) switch (compression) { case compression_type::SNAPPY: return nvcompBatchedSnappyDecompressGetTempSizeEx(std::forward<Args>(args)...); case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_DECOMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompBatchedZstdDecompressGetTempSizeEx(std::forward<Args>(args)...); #else return std::nullopt; #endif case compression_type::DEFLATE: [[fallthrough]]; default: return std::nullopt; } #endif return std::nullopt; } // Dispatcher for nvcompBatched<format>DecompressGetTempSize template <typename... Args> auto batched_decompress_get_temp_size(compression_type compression, Args&&... 
args) { switch (compression) { case compression_type::SNAPPY: return nvcompBatchedSnappyDecompressGetTempSize(std::forward<Args>(args)...); case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_DECOMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompBatchedZstdDecompressGetTempSize(std::forward<Args>(args)...); #else CUDF_FAIL("Decompression error: " + nvcomp::is_decompression_disabled(nvcomp::compression_type::ZSTD).value()); #endif case compression_type::DEFLATE: #if NVCOMP_HAS_DEFLATE(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompBatchedDeflateDecompressGetTempSize(std::forward<Args>(args)...); #else CUDF_FAIL("Decompression error: " + nvcomp::is_decompression_disabled(nvcomp::compression_type::DEFLATE).value()); #endif default: CUDF_FAIL("Unsupported compression type"); } } // Dispatcher for nvcompBatched<format>DecompressAsync template <typename... Args> auto batched_decompress_async(compression_type compression, Args&&... args) { switch (compression) { case compression_type::SNAPPY: return nvcompBatchedSnappyDecompressAsync(std::forward<Args>(args)...); case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_DECOMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompBatchedZstdDecompressAsync(std::forward<Args>(args)...); #else CUDF_FAIL("Decompression error: " + nvcomp::is_decompression_disabled(nvcomp::compression_type::ZSTD).value()); #endif case compression_type::DEFLATE: #if NVCOMP_HAS_DEFLATE(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompBatchedDeflateDecompressAsync(std::forward<Args>(args)...); #else CUDF_FAIL("Decompression error: " + nvcomp::is_decompression_disabled(nvcomp::compression_type::DEFLATE).value()); #endif default: CUDF_FAIL("Unsupported compression type"); } } std::string compression_type_name(compression_type compression) { switch (compression) { case compression_type::SNAPPY: return "Snappy"; case compression_type::ZSTD: return "Zstandard"; case compression_type::DEFLATE: return "Deflate"; } return "compression_type(" + std::to_string(static_cast<int>(compression)) + ")"; } size_t batched_decompress_temp_size(compression_type compression, size_t num_chunks, size_t max_uncomp_chunk_size, size_t max_total_uncomp_size) { size_t temp_size = 0; auto nvcomp_status = batched_decompress_get_temp_size_ex( compression, num_chunks, max_uncomp_chunk_size, &temp_size, max_total_uncomp_size); if (nvcomp_status.value_or(nvcompStatus_t::nvcompErrorInternal) != nvcompStatus_t::nvcompSuccess) { nvcomp_status = batched_decompress_get_temp_size(compression, num_chunks, max_uncomp_chunk_size, &temp_size); } CUDF_EXPECTS(nvcomp_status == nvcompStatus_t::nvcompSuccess, "Unable to get scratch size for decompression"); return temp_size; } void batched_decompress(compression_type compression, device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, size_t max_uncomp_chunk_size, size_t max_total_uncomp_size, rmm::cuda_stream_view stream) { auto const num_chunks = inputs.size(); // cuDF inflate inputs converted to nvcomp inputs auto const nvcomp_args = create_batched_nvcomp_args(inputs, outputs, stream); rmm::device_uvector<size_t> actual_uncompressed_data_sizes(num_chunks, stream); rmm::device_uvector<nvcompStatus_t> nvcomp_statuses(num_chunks, stream); // Temporary space required for decompression auto const temp_size = batched_decompress_temp_size( compression, num_chunks, 
max_uncomp_chunk_size, max_total_uncomp_size); rmm::device_buffer scratch(temp_size, stream); auto const nvcomp_status = batched_decompress_async(compression, nvcomp_args.input_data_ptrs.data(), nvcomp_args.input_data_sizes.data(), nvcomp_args.output_data_sizes.data(), actual_uncompressed_data_sizes.data(), num_chunks, scratch.data(), scratch.size(), nvcomp_args.output_data_ptrs.data(), nvcomp_statuses.data(), stream.value()); CUDF_EXPECTS(nvcomp_status == nvcompStatus_t::nvcompSuccess, "unable to perform decompression"); update_compression_results(nvcomp_statuses, actual_uncompressed_data_sizes, results, stream); } // Wrapper for nvcompBatched<format>CompressGetTempSize auto batched_compress_get_temp_size(compression_type compression, size_t batch_size, size_t max_uncompressed_chunk_bytes) { size_t temp_size = 0; nvcompStatus_t nvcomp_status = nvcompStatus_t::nvcompSuccess; switch (compression) { case compression_type::SNAPPY: nvcomp_status = nvcompBatchedSnappyCompressGetTempSize( batch_size, max_uncompressed_chunk_bytes, nvcompBatchedSnappyDefaultOpts, &temp_size); break; case compression_type::DEFLATE: #if NVCOMP_HAS_DEFLATE(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) nvcomp_status = nvcompBatchedDeflateCompressGetTempSize( batch_size, max_uncompressed_chunk_bytes, nvcompBatchedDeflateDefaultOpts, &temp_size); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::DEFLATE).value()); #endif case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_COMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) nvcomp_status = nvcompBatchedZstdCompressGetTempSize( batch_size, max_uncompressed_chunk_bytes, nvcompBatchedZstdDefaultOpts, &temp_size); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::ZSTD).value()); #endif default: CUDF_FAIL("Unsupported compression type"); } CUDF_EXPECTS(nvcomp_status == nvcompStatus_t::nvcompSuccess, "Unable to get scratch size for compression"); return temp_size; } #if NVCOMP_HAS_COMP_TEMPSIZE_EX(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) // Wrapper for nvcompBatched<format>CompressGetTempSizeEx auto batched_compress_get_temp_size_ex(compression_type compression, size_t batch_size, size_t max_uncompressed_chunk_bytes, size_t max_total_uncompressed_bytes) { size_t temp_size = 0; nvcompStatus_t nvcomp_status = nvcompStatus_t::nvcompSuccess; switch (compression) { case compression_type::SNAPPY: nvcomp_status = nvcompBatchedSnappyCompressGetTempSizeEx(batch_size, max_uncompressed_chunk_bytes, nvcompBatchedSnappyDefaultOpts, &temp_size, max_total_uncompressed_bytes); break; case compression_type::DEFLATE: nvcomp_status = nvcompBatchedDeflateCompressGetTempSizeEx(batch_size, max_uncompressed_chunk_bytes, nvcompBatchedDeflateDefaultOpts, &temp_size, max_total_uncompressed_bytes); break; case compression_type::ZSTD: nvcomp_status = nvcompBatchedZstdCompressGetTempSizeEx(batch_size, max_uncompressed_chunk_bytes, nvcompBatchedZstdDefaultOpts, &temp_size, max_total_uncompressed_bytes); break; default: CUDF_FAIL("Unsupported compression type"); } CUDF_EXPECTS(nvcomp_status == nvcompStatus_t::nvcompSuccess, "Unable to get scratch size for compression"); return temp_size; } #endif size_t batched_compress_temp_size(compression_type compression, size_t num_chunks, size_t max_uncomp_chunk_size, size_t max_total_uncomp_size) { #if NVCOMP_HAS_COMP_TEMPSIZE_EX(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) try 
{ return batched_compress_get_temp_size_ex( compression, num_chunks, max_uncomp_chunk_size, max_total_uncomp_size); } catch (...) { // Ignore errors in the expanded version; fall back to the old API in case of failure CUDF_LOG_WARN( "CompressGetTempSizeEx call failed, falling back to CompressGetTempSize; this may increase " "the memory usage"); } #endif return batched_compress_get_temp_size(compression, num_chunks, max_uncomp_chunk_size); } size_t compress_max_output_chunk_size(compression_type compression, uint32_t max_uncompressed_chunk_bytes) { auto const capped_uncomp_bytes = std::min<size_t>( compress_max_allowed_chunk_size(compression).value_or(max_uncompressed_chunk_bytes), max_uncompressed_chunk_bytes); size_t max_comp_chunk_size = 0; nvcompStatus_t status = nvcompStatus_t::nvcompSuccess; switch (compression) { case compression_type::SNAPPY: status = nvcompBatchedSnappyCompressGetMaxOutputChunkSize( capped_uncomp_bytes, nvcompBatchedSnappyDefaultOpts, &max_comp_chunk_size); break; case compression_type::DEFLATE: #if NVCOMP_HAS_DEFLATE(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) status = nvcompBatchedDeflateCompressGetMaxOutputChunkSize( capped_uncomp_bytes, nvcompBatchedDeflateDefaultOpts, &max_comp_chunk_size); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::DEFLATE).value()); #endif case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_COMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) status = nvcompBatchedZstdCompressGetMaxOutputChunkSize( capped_uncomp_bytes, nvcompBatchedZstdDefaultOpts, &max_comp_chunk_size); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::ZSTD).value()); #endif default: CUDF_FAIL("Unsupported compression type"); } CUDF_EXPECTS(status == nvcompStatus_t::nvcompSuccess, "failed to get max uncompressed chunk size"); return max_comp_chunk_size; } // Dispatcher for nvcompBatched<format>CompressAsync static void batched_compress_async(compression_type compression, void const* const* device_uncompressed_ptrs, size_t const* device_uncompressed_bytes, size_t max_uncompressed_chunk_bytes, size_t batch_size, void* device_temp_ptr, size_t temp_bytes, void* const* device_compressed_ptrs, size_t* device_compressed_bytes, rmm::cuda_stream_view stream) { nvcompStatus_t nvcomp_status = nvcompStatus_t::nvcompSuccess; switch (compression) { case compression_type::SNAPPY: nvcomp_status = nvcompBatchedSnappyCompressAsync(device_uncompressed_ptrs, device_uncompressed_bytes, max_uncompressed_chunk_bytes, batch_size, device_temp_ptr, temp_bytes, device_compressed_ptrs, device_compressed_bytes, nvcompBatchedSnappyDefaultOpts, stream.value()); break; case compression_type::DEFLATE: #if NVCOMP_HAS_DEFLATE(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) nvcomp_status = nvcompBatchedDeflateCompressAsync(device_uncompressed_ptrs, device_uncompressed_bytes, max_uncompressed_chunk_bytes, batch_size, device_temp_ptr, temp_bytes, device_compressed_ptrs, device_compressed_bytes, nvcompBatchedDeflateDefaultOpts, stream.value()); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::DEFLATE).value()); #endif case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_COMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) nvcomp_status = nvcompBatchedZstdCompressAsync(device_uncompressed_ptrs, device_uncompressed_bytes, max_uncompressed_chunk_bytes, batch_size, 
device_temp_ptr, temp_bytes, device_compressed_ptrs, device_compressed_bytes, nvcompBatchedZstdDefaultOpts, stream.value()); break; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::ZSTD).value()); #endif default: CUDF_FAIL("Unsupported compression type"); } CUDF_EXPECTS(nvcomp_status == nvcompStatus_t::nvcompSuccess, "Error in compression"); } bool is_aligned(void const* ptr, std::uintptr_t alignment) noexcept { return (reinterpret_cast<std::uintptr_t>(ptr) % alignment) == 0; } void batched_compress(compression_type compression, device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, rmm::cuda_stream_view stream) { auto const num_chunks = inputs.size(); auto nvcomp_args = create_batched_nvcomp_args(inputs, outputs, stream); skip_unsupported_inputs( nvcomp_args.input_data_sizes, results, compress_max_allowed_chunk_size(compression), stream); auto const [max_uncomp_chunk_size, total_uncomp_size] = max_chunk_and_total_input_size(nvcomp_args.input_data_sizes, stream); auto const temp_size = batched_compress_temp_size(compression, num_chunks, max_uncomp_chunk_size, total_uncomp_size); rmm::device_buffer scratch(temp_size, stream); CUDF_EXPECTS(is_aligned(scratch.data(), 8), "Compression failed, misaligned scratch buffer"); rmm::device_uvector<size_t> actual_compressed_data_sizes(num_chunks, stream); batched_compress_async(compression, nvcomp_args.input_data_ptrs.data(), nvcomp_args.input_data_sizes.data(), max_uncomp_chunk_size, num_chunks, scratch.data(), scratch.size(), nvcomp_args.output_data_ptrs.data(), actual_compressed_data_sizes.data(), stream.value()); update_compression_results(actual_compressed_data_sizes, results, stream); } feature_status_parameters::feature_status_parameters() : lib_major_version{NVCOMP_MAJOR_VERSION}, lib_minor_version{NVCOMP_MINOR_VERSION}, lib_patch_version{NVCOMP_PATCH_VERSION}, are_all_integrations_enabled{detail::nvcomp_integration::is_all_enabled()}, are_stable_integrations_enabled{detail::nvcomp_integration::is_stable_enabled()} { int device; CUDF_CUDA_TRY(cudaGetDevice(&device)); CUDF_CUDA_TRY( cudaDeviceGetAttribute(&compute_capability_major, cudaDevAttrComputeCapabilityMajor, device)); } // Represents all parameters required to determine status of a compression/decompression feature using feature_status_inputs = std::pair<compression_type, feature_status_parameters>; struct hash_feature_status_inputs { size_t operator()(feature_status_inputs const& fsi) const { // Outside of unit tests, the same `feature_status_parameters` value will always be passed // within a run; for simplicity, only use `compression_type` for the hash return std::hash<compression_type>{}(fsi.first); } }; // Hash map type that stores feature status for different combinations of input parameters using feature_status_memo_map = std::unordered_map<feature_status_inputs, std::optional<std::string>, hash_feature_status_inputs>; std::optional<std::string> is_compression_disabled_impl(compression_type compression, feature_status_parameters params) { switch (compression) { case compression_type::DEFLATE: { if (not NVCOMP_HAS_DEFLATE( params.lib_major_version, params.lib_minor_version, params.lib_patch_version)) { return "nvCOMP 2.5 or newer is required for Deflate compression"; } if (not params.are_all_integrations_enabled) { return "DEFLATE compression is experimental, you can enable it through " "`LIBCUDF_NVCOMP_POLICY` environment variable."; } return 
std::nullopt; } case compression_type::SNAPPY: { if (not params.are_stable_integrations_enabled) { return "Snappy compression has been disabled through the `LIBCUDF_NVCOMP_POLICY` " "environment variable."; } return std::nullopt; } case compression_type::ZSTD: { if (not NVCOMP_HAS_ZSTD_COMP( params.lib_major_version, params.lib_minor_version, params.lib_patch_version)) { return "nvCOMP 2.4 or newer is required for Zstandard compression"; } if (not params.are_stable_integrations_enabled) { return "Zstandard compression is experimental, you can enable it through " "`LIBCUDF_NVCOMP_POLICY` environment variable."; } return std::nullopt; } default: return "Unsupported compression type"; } return "Unsupported compression type"; } std::optional<std::string> is_compression_disabled(compression_type compression, feature_status_parameters params) { static feature_status_memo_map comp_status_reason; static std::mutex memo_map_mutex; std::unique_lock memo_map_lock{memo_map_mutex}; if (auto mem_res_it = comp_status_reason.find(feature_status_inputs{compression, params}); mem_res_it != comp_status_reason.end()) { return mem_res_it->second; } // The rest of the function will execute only once per run, the memoized result will be returned // in all subsequent calls with the same compression type auto const reason = is_compression_disabled_impl(compression, params); comp_status_reason[{compression, params}] = reason; memo_map_lock.unlock(); if (reason.has_value()) { CUDF_LOG_INFO("nvCOMP is disabled for {} compression; reason: {}", compression_type_name(compression), reason.value()); } else { CUDF_LOG_INFO("nvCOMP is enabled for {} compression", compression_type_name(compression)); } return reason; } std::optional<std::string> is_zstd_decomp_disabled(feature_status_parameters const& params) { if (not NVCOMP_HAS_ZSTD_DECOMP( params.lib_major_version, params.lib_minor_version, params.lib_patch_version)) { return "nvCOMP 2.3 or newer is required for Zstandard decompression"; } if (NVCOMP_ZSTD_DECOMP_IS_STABLE( params.lib_major_version, params.lib_minor_version, params.lib_patch_version)) { if (not params.are_stable_integrations_enabled) { return "Zstandard decompression has been disabled through the `LIBCUDF_NVCOMP_POLICY` " "environment variable."; } } else if (not params.are_all_integrations_enabled) { return "Zstandard decompression is experimental, you can enable it through " "`LIBCUDF_NVCOMP_POLICY` environment variable."; } if (NVCOMP_ZSTD_IS_DISABLED_ON_PASCAL( params.lib_major_version, params.lib_minor_version, params.lib_patch_version) and params.compute_capability_major == 6) { return "Zstandard decompression is disabled on Pascal GPUs"; } return std::nullopt; } std::optional<std::string> is_decompression_disabled_impl(compression_type compression, feature_status_parameters params) { switch (compression) { case compression_type::DEFLATE: { if (not NVCOMP_HAS_DEFLATE( params.lib_major_version, params.lib_minor_version, params.lib_patch_version)) { return "nvCOMP 2.5 or newer is required for Deflate decompression"; } if (not params.are_all_integrations_enabled) { return "DEFLATE decompression is experimental, you can enable it through " "`LIBCUDF_NVCOMP_POLICY` environment variable."; } return std::nullopt; } case compression_type::SNAPPY: { if (not params.are_stable_integrations_enabled) { return "Snappy decompression has been disabled through the `LIBCUDF_NVCOMP_POLICY` " "environment variable."; } return std::nullopt; } case compression_type::ZSTD: return is_zstd_decomp_disabled(params); default: 
return "Unsupported compression type"; } return "Unsupported compression type"; } std::optional<std::string> is_decompression_disabled(compression_type compression, feature_status_parameters params) { static feature_status_memo_map decomp_status_reason; static std::mutex memo_map_mutex; std::unique_lock memo_map_lock{memo_map_mutex}; if (auto mem_res_it = decomp_status_reason.find(feature_status_inputs{compression, params}); mem_res_it != decomp_status_reason.end()) { return mem_res_it->second; } // The rest of the function will execute only once per run, the memoized result will be returned // in all subsequent calls with the same compression type auto const reason = is_decompression_disabled_impl(compression, params); decomp_status_reason[{compression, params}] = reason; memo_map_lock.unlock(); if (reason.has_value()) { CUDF_LOG_INFO("nvCOMP is disabled for {} decompression; reason: {}", compression_type_name(compression), reason.value()); } else { CUDF_LOG_INFO("nvCOMP is enabled for {} decompression", compression_type_name(compression)); } return reason; } size_t compress_input_alignment_bits(compression_type compression) { switch (compression) { case compression_type::DEFLATE: return 0; case compression_type::SNAPPY: return 0; case compression_type::ZSTD: return 2; default: CUDF_FAIL("Unsupported compression type"); } } size_t compress_output_alignment_bits(compression_type compression) { switch (compression) { case compression_type::DEFLATE: return 3; case compression_type::SNAPPY: return 0; case compression_type::ZSTD: return 0; default: CUDF_FAIL("Unsupported compression type"); } } std::optional<size_t> compress_max_allowed_chunk_size(compression_type compression) { switch (compression) { case compression_type::DEFLATE: return 64 * 1024; case compression_type::SNAPPY: return std::nullopt; case compression_type::ZSTD: #if NVCOMP_HAS_ZSTD_COMP(NVCOMP_MAJOR_VERSION, NVCOMP_MINOR_VERSION, NVCOMP_PATCH_VERSION) return nvcompZstdCompressionMaxAllowedChunkSize; #else CUDF_FAIL("Compression error: " + nvcomp::is_compression_disabled(nvcomp::compression_type::ZSTD).value()); #endif default: return std::nullopt; } } } // namespace cudf::io::nvcomp
0
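Each CUDF_FAIL branch above surfaces the reason string from the memoized status checks, so callers can also query availability up front instead of catching a failure mid-pipeline. A hedged usage sketch built only from functions defined in this file (the surrounding buffers, results, and error handling are elided):

#include <cstdio>

void report_zstd_compression_status()
{
  using namespace cudf::io::nvcomp;
  // Returns the reason string when nvCOMP ZSTD is unusable with the current
  // library version and LIBCUDF_NVCOMP_POLICY setting; nullopt when enabled
  auto const reason =
    is_compression_disabled(compression_type::ZSTD, feature_status_parameters{});
  if (reason) {
    std::printf("ZSTD compression disabled: %s\n", reason->c_str());
    return;  // fall back to another codec, or fail early
  }
  std::printf("ZSTD compression available\n");
}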
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/uncomp.cpp
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "io_uncomp.hpp" #include "nvcomp_adapter.hpp" #include "unbz2.hpp" // bz2 uncompress #include <io/utilities/hostdevice_vector.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <cuda_runtime.h> #include <cstring> // memset #include <zlib.h> // uncompress using cudf::host_span; namespace cudf { namespace io { #pragma pack(push, 1) struct gz_file_header_s { uint8_t id1; // 0x1f uint8_t id2; // 0x8b uint8_t comp_mthd; // compression method (0-7=reserved, 8=deflate) uint8_t flags; // flags (GZIPHeaderFlag) uint8_t mtime[4]; // If non-zero: modification time (Unix format) uint8_t xflags; // Extra compressor-specific flags uint8_t os; // OS id }; struct zip_eocd_s // end of central directory { uint32_t sig; // 0x0605'4b50 uint16_t disk_id; // number of this disk uint16_t start_disk; // number of the disk with the start of the central directory uint16_t num_entries; // number of entries in the central dir on this disk uint16_t total_entries; // total number of entries in the central dir uint32_t cdir_size; // size of the central directory uint32_t cdir_offset; // offset of start of central directory with respect to the starting disk // number uint16_t comment_len; // comment length (excluded from struct) }; struct zip64_eocdl // end of central dir locator { uint32_t sig; // 0x0706'4b50 uint32_t disk_start; // number of the disk with the start of the zip64 end of central directory uint64_t eocdr_ofs; // relative offset of the zip64 end of central directory record uint32_t num_disks; // total number of disks }; struct zip_cdfh_s // central directory file header { uint32_t sig; // 0x0201'4b50 uint16_t ver; // version made by uint16_t min_ver; // version needed to extract uint16_t gp_flags; // general purpose bit flag uint16_t comp_method; // compression method uint16_t file_time; // last mod file time uint16_t file_date; // last mod file date uint32_t crc32; // crc - 32 uint32_t comp_size; // compressed size uint32_t uncomp_size; // uncompressed size uint16_t fname_len; // filename length uint16_t extra_len; // extra field length uint16_t comment_len; // file comment length uint16_t start_disk; // disk number start uint16_t int_fattr; // internal file attributes uint32_t ext_fattr; // external file attributes uint32_t hdr_ofs; // relative offset of local header }; struct zip_lfh_s { uint32_t sig; // 0x0403'4b50 uint16_t ver; // version needed to extract uint16_t gp_flags; // general purpose bit flag uint16_t comp_method; // compression method uint16_t file_time; // last mod file time uint16_t file_date; // last mod file date uint32_t crc32; // crc - 32 uint32_t comp_size; // compressed size uint32_t uncomp_size; // uncompressed size uint16_t fname_len; // filename length uint16_t extra_len; // extra field length }; struct bz2_file_header_s { uint8_t sig[3]; // "BZh" uint8_t blksz; // block size 1..9 in 100kB units 
(post-RLE) }; #pragma pack(pop) struct gz_archive_s { gz_file_header_s const* fhdr; uint16_t hcrc16; // header crc16 if present uint16_t xlen; uint8_t const* fxtra; // xlen bytes (optional) uint8_t const* fname; // zero-terminated original filename if present uint8_t const* fcomment; // zero-terminated comment if present uint8_t const* comp_data; // compressed data size_t comp_len; // Compressed data length uint32_t crc32; // CRC32 of uncompressed data uint32_t isize; // Input size modulo 2^32 }; struct zip_archive_s { zip_eocd_s const* eocd; // end of central directory zip64_eocdl const* eocdl; // end of central dir locator (optional) zip_cdfh_s const* cdfh; // start of central directory file headers }; bool ParseGZArchive(gz_archive_s* dst, uint8_t const* raw, size_t len) { gz_file_header_s const* fhdr; if (!dst) return false; memset(dst, 0, sizeof(gz_archive_s)); if (len < sizeof(gz_file_header_s) + 8) return false; fhdr = reinterpret_cast<gz_file_header_s const*>(raw); if (fhdr->id1 != 0x1f || fhdr->id2 != 0x8b) return false; dst->fhdr = fhdr; raw += sizeof(gz_file_header_s); len -= sizeof(gz_file_header_s); if (fhdr->flags & GZIPHeaderFlag::fextra) { uint32_t xlen; if (len < 2) return false; xlen = raw[0] | (raw[1] << 8); raw += 2; len -= 2; if (len < xlen) return false; dst->xlen = (uint16_t)xlen; dst->fxtra = raw; raw += xlen; len -= xlen; } if (fhdr->flags & GZIPHeaderFlag::fname) { size_t l = 0; uint8_t c; do { if (l >= len) return false; c = raw[l]; l++; } while (c != 0); dst->fname = raw; raw += l; len -= l; } if (fhdr->flags & GZIPHeaderFlag::fcomment) { size_t l = 0; uint8_t c; do { if (l >= len) return false; c = raw[l]; l++; } while (c != 0); dst->fcomment = raw; raw += l; len -= l; } if (fhdr->flags & GZIPHeaderFlag::fhcrc) { if (len < 2) return false; dst->hcrc16 = raw[0] | (raw[1] << 8); raw += 2; len -= 2; } if (len < 8) return false; dst->crc32 = raw[len - 8] | (raw[len - 7] << 8) | (raw[len - 6] << 16) | (raw[len - 5] << 24); dst->isize = raw[len - 4] | (raw[len - 3] << 8) | (raw[len - 2] << 16) | (raw[len - 1] << 24); len -= 8; dst->comp_data = raw; dst->comp_len = len; return (fhdr->comp_mthd == 8 && len > 0); } bool OpenZipArchive(zip_archive_s* dst, uint8_t const* raw, size_t len) { memset(dst, 0, sizeof(zip_archive_s)); // Find the end of central directory if (len >= sizeof(zip_eocd_s) + 2) { for (ptrdiff_t i = len - sizeof(zip_eocd_s) - 2; i + sizeof(zip_eocd_s) + 2 + 0xffff >= len && i >= 0; i--) { auto const* eocd = reinterpret_cast<zip_eocd_s const*>(raw + i); if (eocd->sig == 0x0605'4b50 && eocd->disk_id == eocd->start_disk // multi-file archives not supported && eocd->num_entries == eocd->total_entries && eocd->cdir_size >= sizeof(zip_cdfh_s) * eocd->num_entries && eocd->cdir_offset < len && i + *reinterpret_cast<uint16_t const*>(eocd + 1) <= static_cast<ptrdiff_t>(len)) { auto const* cdfh = reinterpret_cast<zip_cdfh_s const*>(raw + eocd->cdir_offset); dst->eocd = eocd; if (i >= static_cast<ptrdiff_t>(sizeof(zip64_eocdl))) { auto const* eocdl = reinterpret_cast<zip64_eocdl const*>(raw + i - sizeof(zip64_eocdl)); if (eocdl->sig == 0x0706'4b50) { dst->eocdl = eocdl; } } // Start of central directory if (cdfh->sig == 0x0201'4b50) { dst->cdfh = cdfh; } } } } return (dst->eocd && dst->cdfh); } int cpu_inflate(uint8_t* uncomp_data, size_t* destLen, uint8_t const* comp_data, size_t comp_len) { int zerr; z_stream strm; memset(&strm, 0, sizeof(strm)); strm.next_in = const_cast<Bytef*>(reinterpret_cast<Bytef const*>(comp_data)); strm.avail_in = comp_len; strm.total_in 
= 0; strm.next_out = uncomp_data; strm.avail_out = *destLen; strm.total_out = 0; zerr = inflateInit2(&strm, -15); // -15 for raw data without GZIP headers if (zerr != 0) { *destLen = 0; return zerr; } zerr = inflate(&strm, Z_FINISH); *destLen = strm.total_out; inflateEnd(&strm); return (zerr == Z_STREAM_END) ? Z_OK : zerr; } /** * @brief Uncompresses a raw DEFLATE stream to a char vector. * The vector will be grown to match the uncompressed size * Optimized for the case where the initial size is the uncompressed * size truncated to 32-bit, and grows the buffer in 1GB increments. * * @param[out] dst Destination vector * @param[in] comp_data Raw compressed data * @param[in] comp_len Compressed data size */ void cpu_inflate_vector(std::vector<uint8_t>& dst, uint8_t const* comp_data, size_t comp_len) { z_stream strm{}; strm.next_in = const_cast<Bytef*>(reinterpret_cast<Bytef const*>(comp_data)); strm.avail_in = comp_len; strm.total_in = 0; strm.next_out = dst.data(); strm.avail_out = dst.size(); strm.total_out = 0; auto zerr = inflateInit2(&strm, -15); // -15 for raw data without GZIP headers CUDF_EXPECTS(zerr == 0, "Error in DEFLATE stream"); do { if (strm.avail_out == 0) { dst.resize(strm.total_out + (1 << 30)); strm.avail_out = dst.size() - strm.total_out; strm.next_out = reinterpret_cast<uint8_t*>(dst.data()) + strm.total_out; } zerr = inflate(&strm, Z_SYNC_FLUSH); } while ((zerr == Z_BUF_ERROR || zerr == Z_OK) && strm.avail_out == 0 && strm.total_out == dst.size()); dst.resize(strm.total_out); inflateEnd(&strm); CUDF_EXPECTS(zerr == Z_STREAM_END, "Error in DEFLATE stream"); } std::vector<uint8_t> decompress(compression_type compression, host_span<uint8_t const> src) { CUDF_EXPECTS(src.data() != nullptr, "Decompression: Source cannot be nullptr"); CUDF_EXPECTS(not src.empty(), "Decompression: Source size cannot be 0"); auto raw = src.data(); uint8_t const* comp_data = nullptr; size_t comp_len = 0; size_t uncomp_len = 0; switch (compression) { case compression_type::AUTO: case compression_type::GZIP: { gz_archive_s gz; if (ParseGZArchive(&gz, raw, src.size())) { compression = compression_type::GZIP; comp_data = gz.comp_data; comp_len = gz.comp_len; uncomp_len = gz.isize; } if (compression != compression_type::AUTO) break; [[fallthrough]]; } case compression_type::ZIP: { zip_archive_s za; if (OpenZipArchive(&za, raw, src.size())) { size_t cdfh_ofs = 0; for (int i = 0; i < za.eocd->num_entries; i++) { zip_cdfh_s const* cdfh = reinterpret_cast<zip_cdfh_s const*>( reinterpret_cast<uint8_t const*>(za.cdfh) + cdfh_ofs); int cdfh_len = sizeof(zip_cdfh_s) + cdfh->fname_len + cdfh->extra_len + cdfh->comment_len; if (cdfh_ofs + cdfh_len > za.eocd->cdir_size || cdfh->sig != 0x0201'4b50) { // Bad cdir break; } // For now, only accept with non-zero file sizes and DEFLATE if (cdfh->comp_method == 8 && cdfh->comp_size > 0 && cdfh->uncomp_size > 0) { size_t lfh_ofs = cdfh->hdr_ofs; zip_lfh_s const* lfh = reinterpret_cast<zip_lfh_s const*>(raw + lfh_ofs); if (lfh_ofs + sizeof(zip_lfh_s) <= src.size() && lfh->sig == 0x0403'4b50 && lfh_ofs + sizeof(zip_lfh_s) + lfh->fname_len + lfh->extra_len <= src.size()) { if (lfh->comp_method == 8 && lfh->comp_size > 0 && lfh->uncomp_size > 0) { size_t file_start = lfh_ofs + sizeof(zip_lfh_s) + lfh->fname_len + lfh->extra_len; size_t file_end = file_start + lfh->comp_size; if (file_end <= src.size()) { // Pick the first valid file of non-zero size (only 1 file expected in archive) compression = compression_type::ZIP; comp_data = raw + file_start; comp_len = 
lfh->comp_size; uncomp_len = lfh->uncomp_size; break; } } } } cdfh_ofs += cdfh_len; } } } if (compression != compression_type::AUTO) break; [[fallthrough]]; case compression_type::BZIP2: if (src.size() > 4) { bz2_file_header_s const* fhdr = reinterpret_cast<bz2_file_header_s const*>(raw); // Check for BZIP2 file signature "BZh1" to "BZh9" if (fhdr->sig[0] == 'B' && fhdr->sig[1] == 'Z' && fhdr->sig[2] == 'h' && fhdr->blksz >= '1' && fhdr->blksz <= '9') { compression = compression_type::BZIP2; comp_data = raw; comp_len = src.size(); uncomp_len = 0; } } if (compression != compression_type::AUTO) break; [[fallthrough]]; default: CUDF_FAIL("Unsupported compressed stream type"); } CUDF_EXPECTS(comp_data != nullptr and comp_len > 0, "Unsupported compressed stream type"); if (uncomp_len <= 0) { uncomp_len = comp_len * 4 + 4096; // In case uncompressed size isn't known in advance, assume // ~4:1 compression for initial size } if (compression == compression_type::GZIP || compression == compression_type::ZIP) { // INFLATE std::vector<uint8_t> dst(uncomp_len); cpu_inflate_vector(dst, comp_data, comp_len); return dst; } if (compression == compression_type::BZIP2) { size_t src_ofs = 0; size_t dst_ofs = 0; int bz_err = 0; std::vector<uint8_t> dst(uncomp_len); do { size_t dst_len = uncomp_len - dst_ofs; bz_err = cpu_bz2_uncompress(comp_data, comp_len, dst.data() + dst_ofs, &dst_len, &src_ofs); if (bz_err == BZ_OUTBUFF_FULL) { // TBD: We could infer the compression ratio based on produced/consumed byte counts // in order to minimize realloc events and over-allocation dst_ofs = dst_len; dst_len = uncomp_len + (uncomp_len / 2); dst.resize(dst_len); uncomp_len = dst_len; } else if (bz_err == 0) { uncomp_len = dst_len; dst.resize(uncomp_len); } } while (bz_err == BZ_OUTBUFF_FULL); CUDF_EXPECTS(bz_err == 0, "Decompression: error in stream"); return dst; } CUDF_FAIL("Unsupported compressed stream type"); } /** * @brief ZLIB host decompressor (no header) */ size_t decompress_zlib(host_span<uint8_t const> src, host_span<uint8_t> dst) { size_t uncomp_size = dst.size(); CUDF_EXPECTS(0 == cpu_inflate(dst.data(), &uncomp_size, src.data(), src.size()), "ZLIB decompression failed"); return uncomp_size; } /** * @brief GZIP host decompressor (includes header) */ size_t decompress_gzip(host_span<uint8_t const> src, host_span<uint8_t> dst) { gz_archive_s gz; auto const parse_succeeded = ParseGZArchive(&gz, src.data(), src.size()); CUDF_EXPECTS(parse_succeeded, "Failed to parse GZIP header"); return decompress_zlib({gz.comp_data, gz.comp_len}, dst); } /** * @brief SNAPPY host decompressor */ size_t decompress_snappy(host_span<uint8_t const> src, host_span<uint8_t> dst) { CUDF_EXPECTS(not dst.empty() and src.size() >= 1, "invalid Snappy decompress inputs"); uint32_t uncompressed_size, bytes_left, dst_pos; auto cur = src.begin(); auto const end = src.end(); // Read uncompressed length (varint) { uint32_t l = 0, c; uncompressed_size = 0; do { c = *cur++; auto const lo7 = c & 0x7f; if (l >= 28 && c > 0xf) { return 0; } uncompressed_size |= lo7 << l; l += 7; } while (c > 0x7f && cur < end); CUDF_EXPECTS(uncompressed_size != 0 and uncompressed_size <= dst.size() and cur < end, "Destination buffer too small"); } // Decode lz77 dst_pos = 0; bytes_left = uncompressed_size; do { uint32_t blen = *cur++; if (blen & 3) { // Copy uint32_t offset; if (blen & 2) { // xxxxxx1x: copy with 6-bit length, 2-byte or 4-byte offset if (cur + 2 > end) break; offset = *reinterpret_cast<uint16_t const*>(cur); cur += 2; if (blen & 1) // 4-byte offset { 
if (cur + 2 > end) break; offset |= (*reinterpret_cast<uint16_t const*>(cur)) << 16; cur += 2; } blen = (blen >> 2) + 1; } else { // xxxxxx01.oooooooo: copy with 3-bit length, 11-bit offset if (cur >= end) break; offset = ((blen & 0xe0) << 3) | (*cur++); blen = ((blen >> 2) & 7) + 4; } if (offset - 1u >= dst_pos || blen > bytes_left) break; bytes_left -= blen; do { dst[dst_pos] = dst[dst_pos - offset]; dst_pos++; } while (--blen); } else { // xxxxxx00: literal blen >>= 2; if (blen >= 60) { uint32_t const num_bytes = blen - 59; if (cur + num_bytes >= end) break; blen = cur[0]; if (num_bytes > 1) { blen |= cur[1] << 8; if (num_bytes > 2) { blen |= cur[2] << 16; if (num_bytes > 3) { blen |= cur[3] << 24; } } } cur += num_bytes; } blen++; if (cur + blen > end || blen > bytes_left) break; memcpy(dst.data() + dst_pos, cur, blen); cur += blen; dst_pos += blen; bytes_left -= blen; } } while (bytes_left && cur < end); CUDF_EXPECTS(bytes_left == 0, "Snappy Decompression failed"); return uncompressed_size; } /** * @brief ZSTD decompressor that uses nvcomp */ size_t decompress_zstd(host_span<uint8_t const> src, host_span<uint8_t> dst, rmm::cuda_stream_view stream) { // Init device span of spans (source) auto const d_src = cudf::detail::make_device_uvector_async(src, stream, rmm::mr::get_current_device_resource()); auto hd_srcs = cudf::detail::hostdevice_vector<device_span<uint8_t const>>(1, stream); hd_srcs[0] = d_src; hd_srcs.host_to_device_async(stream); // Init device span of spans (temporary destination) auto d_dst = rmm::device_uvector<uint8_t>(dst.size(), stream); auto hd_dsts = cudf::detail::hostdevice_vector<device_span<uint8_t>>(1, stream); hd_dsts[0] = d_dst; hd_dsts.host_to_device_async(stream); auto hd_stats = cudf::detail::hostdevice_vector<compression_result>(1, stream); hd_stats[0] = compression_result{0, compression_status::FAILURE}; hd_stats.host_to_device_async(stream); auto const max_uncomp_page_size = dst.size(); nvcomp::batched_decompress(nvcomp::compression_type::ZSTD, hd_srcs, hd_dsts, hd_stats, max_uncomp_page_size, max_uncomp_page_size, stream); hd_stats.device_to_host_sync(stream); CUDF_EXPECTS(hd_stats[0].status == compression_status::SUCCESS, "ZSTD decompression failed"); // Copy temporary output to `dst` CUDF_CUDA_TRY(cudaMemcpyAsync( dst.data(), d_dst.data(), hd_stats[0].bytes_written, cudaMemcpyDefault, stream.value())); return hd_stats[0].bytes_written; } size_t decompress(compression_type compression, host_span<uint8_t const> src, host_span<uint8_t> dst, rmm::cuda_stream_view stream) { switch (compression) { case compression_type::GZIP: return decompress_gzip(src, dst); case compression_type::ZLIB: return decompress_zlib(src, dst); case compression_type::SNAPPY: return decompress_snappy(src, dst); case compression_type::ZSTD: return decompress_zstd(src, dst, stream); default: CUDF_FAIL("Unsupported compression type"); } } } // namespace io } // namespace cudf
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/cpu_unbz2.cpp
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * cpu_unbz2.cpp * * Heavily based on libbzip2's decompress.c (original copyright notice below) * Modified version for memory-to-memory decompression. * * bzip2 license information is available at * https://spdx.org/licenses/bzip2-1.0.6.html * https://github.com/asimonov-im/bzip2/blob/master/LICENSE * original source code available at * http://www.sourceware.org/bzip2/ */ /*-- Copyright (C) 1996-2002 Julian R Seward. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 3. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 4. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Julian Seward, Cambridge, UK. [email protected] bzip2/libbzip2 version 1.0 of 21 March 2000 This program is based on (at least) the work of: Mike Burrows David Wheeler Peter Fenwick Alistair Moffat Radford Neal Ian H. Witten Robert Sedgewick Jon L. Bentley For more information on these sources, see the manual. --*/ #include "io_uncomp.hpp" #include "unbz2.hpp" #include <cstdio> #include <cstdlib> #include <vector> namespace cudf { namespace io { // Constants for the fast MTF decoder. #define MTFA_SIZE 4096 #define MTFL_SIZE 16 // Header bytes. #define BZ_HDR_B 0x42 /* 'B' */ #define BZ_HDR_Z 0x5a /* 'Z' */ #define BZ_HDR_h 0x68 /* 'h' */ #define BZ_HDR_0 0x30 /* '0' */ // Constants for the back end. 
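// (Editor's note: BZ_MAX_SELECTORS below is sized for the worst case -- a 900k
// block emits one Huffman-group selector per 50 symbols, i.e. at most
// 2 + 900000/50 = 18002 selectors.)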
#define BZ_MAX_ALPHA_SIZE 258 #define BZ_MAX_CODE_LEN 23 #define BZ_RUNA 0 #define BZ_RUNB 1 #define BZ_N_GROUPS 6 #define BZ_G_SIZE 50 #define BZ_MAX_SELECTORS (2 + (900000 / BZ_G_SIZE)) using huff_s = struct { int32_t minLen; int32_t limit[BZ_MAX_CODE_LEN]; int32_t base[BZ_MAX_CODE_LEN]; uint16_t perm[BZ_MAX_ALPHA_SIZE]; }; // Decoder state using unbz_state_s = struct { // Input uint8_t const* cur; uint8_t const* end; uint8_t const* base; uint64_t bitbuf; uint32_t bitpos; // Output uint8_t* out; uint8_t* outend; uint8_t* outbase; // misc administratium uint32_t blockSize100k; int32_t currBlockNo; int32_t save_nblock; // for undoing the Burrows-Wheeler transform std::vector<uint32_t> tt; uint32_t origPtr; int32_t nblock_used; int32_t unzftab[256]; // map of bytes used in block uint8_t seqToUnseq[256]; // for decoding the MTF values int32_t mtfbase[256 / MTFL_SIZE]; uint8_t mtfa[MTFA_SIZE]; uint8_t selector[BZ_MAX_SELECTORS]; uint8_t len[BZ_MAX_ALPHA_SIZE]; huff_s ht[BZ_N_GROUPS]; }; // return next 32 bits static inline uint32_t next32bits(unbz_state_s const* s) { return (uint32_t)((s->bitbuf << s->bitpos) >> 32); } // return next n bits static inline uint32_t showbits(unbz_state_s const* s, uint32_t n) { return (uint32_t)((s->bitbuf << s->bitpos) >> (64 - n)); } // update bit position, refill bit buffer if necessary static void skipbits(unbz_state_s* s, uint32_t n) { uint32_t bitpos = s->bitpos + n; if (bitpos >= 32) { uint8_t const* cur = s->cur + 4; uint32_t next32 = (cur + 4 < s->end) ? __builtin_bswap32(*reinterpret_cast<uint32_t const*>(cur + 4)) : 0; s->cur = cur; s->bitbuf = (s->bitbuf << 32) | next32; bitpos &= 0x1f; } s->bitpos = bitpos; } static inline uint32_t getbits(unbz_state_s* s, uint32_t n) { uint32_t bits = showbits(s, n); skipbits(s, n); return bits; } /*---------------------------------------------------*/ int32_t bz2_decompress_block(unbz_state_s* s) { int nInUse; int32_t i; int32_t j; int32_t t; int32_t alphaSize; int32_t nGroups; int32_t nSelectors; int32_t EOB; int32_t groupNo; int32_t groupPos; uint32_t nblock, nblockMAX; huff_s const* gSel = nullptr; uint32_t inUse16; uint32_t sig0, sig1; // Start-of-block signature sig0 = getbits(s, 24); sig1 = getbits(s, 24); if (sig0 != 0x31'4159 || sig1 != 0x26'5359) { return BZ_DATA_ERROR; } s->currBlockNo++; skipbits(s, 32); // block CRC if (getbits(s, 1)) return BZ_DATA_ERROR; // blockRandomized not supported (old bzip versions) s->origPtr = getbits(s, 24); if (s->origPtr < 0 || s->origPtr > 10 + 100000 * s->blockSize100k) return BZ_DATA_ERROR; // Receive the mapping table inUse16 = getbits(s, 16); nInUse = 0; for (i = 0; i < 16; i++, inUse16 <<= 1) { if (inUse16 & 0x8000) { uint32_t inUse = getbits(s, 16); for (j = 0; j < 16; j++, inUse <<= 1) { if (inUse & 0x8000) { s->seqToUnseq[nInUse++] = (i << 4) + j; } } } } if (nInUse == 0) return BZ_DATA_ERROR; alphaSize = nInUse + 2; // Now the selectors { uint32_t pos; // BZ_N_GROUPS * 4-bit nGroups = getbits(s, 3); nSelectors = getbits(s, 15); if (nGroups < 2 || nGroups > 6 || nSelectors < 1 || nSelectors > BZ_MAX_SELECTORS) return BZ_DATA_ERROR; pos = 0x7654'3210; for (i = 0; i < nSelectors; i++) { uint32_t selectorMtf = 0, mask, tmp; for (int32_t v = next32bits(s); v < 0; v <<= 1) { if (++selectorMtf >= (uint32_t)nGroups) return BZ_DATA_ERROR; } skipbits(s, selectorMtf + 1); // Undo the MTF values for the selectors. 
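// (Editor's note: `pos` packs the 8-entry selector MTF list as one nibble per
// entry, list front in the lowest nibble; e.g. selectorMtf == 2 extracts nibble 2
// and shifts the two nibbles below it up, turning 0x7654'3210 into 0x7654'3102.)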
tmp = (pos >> (selectorMtf * 4)) & 0xf; s->selector[i] = tmp; mask = (1 << ((selectorMtf * 4) + 4)) - 1; pos = (pos & ~mask) | ((pos << 4) & mask) | tmp; } } // Now the coding tables for (t = 0; t < nGroups; t++) { int32_t pp, vec; uint8_t* length = &s->len[0]; int32_t curr = getbits(s, 5); int32_t minLen = BZ_MAX_CODE_LEN - 1; int32_t maxLen = 0; huff_s* sel = &s->ht[t]; for (i = 0; i < alphaSize; i++) { for (;;) { uint32_t v = showbits(s, 2); if (curr < 1 || curr > 20) return BZ_DATA_ERROR; if (v < 2) { skipbits(s, 1); break; } else { skipbits(s, 2); curr += 1 - (v & 1) * 2; } } length[i] = curr; if (curr > maxLen) maxLen = curr; if (curr < minLen) minLen = curr; } // Create the Huffman decoding tables for this group pp = 0; for (i = minLen; i <= maxLen; i++) for (j = 0; j < alphaSize; j++) if (length[j] == i) { sel->perm[pp] = j; pp++; }; for (i = 0; i < BZ_MAX_CODE_LEN; i++) { sel->base[i] = 0; sel->limit[i] = 0; } for (i = 0; i < alphaSize; i++) sel->base[length[i] + 1]++; for (i = 1; i < BZ_MAX_CODE_LEN; i++) sel->base[i] += sel->base[i - 1]; vec = 0; for (i = minLen; i <= maxLen; i++) { vec += (sel->base[i + 1] - sel->base[i]); sel->limit[i] = vec - 1; vec <<= 1; } for (i = minLen + 1; i <= maxLen; i++) sel->base[i] = ((sel->limit[i - 1] + 1) << 1) - sel->base[i]; sel->minLen = minLen; } // Now the MTF values EOB = nInUse + 1; nblockMAX = 100000 * s->blockSize100k; for (i = 0; i <= 255; i++) s->unzftab[i] = 0; // MTF init { int32_t kk = MTFA_SIZE - 1; for (int32_t ii = 256 / MTFL_SIZE - 1; ii >= 0; ii--) { for (int32_t jj = MTFL_SIZE - 1; jj >= 0; jj--) { s->mtfa[kk--] = (uint8_t)(ii * MTFL_SIZE + jj); } s->mtfbase[ii] = kk + 1; } } // end MTF init nblock = 0; groupNo = -1; groupPos = 0; for (;;) { uint32_t es = 0; uint32_t N = 1; uint32_t nextSym, nn, uc; for (;;) { uint32_t next32, zvec; int32_t zn; if (groupPos == 0) { if (++groupNo >= nSelectors) return BZ_DATA_ERROR; groupPos = BZ_G_SIZE; gSel = &s->ht[s->selector[groupNo]]; } groupPos--; next32 = next32bits(s); zn = gSel->minLen; for (;;) { zvec = next32 >> (32u - (uint32_t)zn); if (zn > 20) // the longest code return BZ_DATA_ERROR; if (zvec <= (uint32_t)gSel->limit[zn]) break; zn++; } skipbits(s, zn); zvec -= gSel->base[zn]; if (zvec >= BZ_MAX_ALPHA_SIZE) return BZ_DATA_ERROR; nextSym = gSel->perm[zvec]; if (nextSym > BZ_RUNB) break; es += N << nextSym; N <<= 1; } if (es > 0) { if (nblock + es > nblockMAX) return BZ_DATA_ERROR; uc = s->seqToUnseq[s->mtfa[s->mtfbase[0]]]; s->unzftab[uc] += es; do { s->tt[nblock++] = uc; } while (--es); } if (nextSym == static_cast<uint32_t>(EOB)) break; if (nblock >= nblockMAX) return BZ_DATA_ERROR; nn = nextSym - 1; // uc = MTF ( nextSym-1 ) if (nn < MTFL_SIZE) { // avoid general-case expense int32_t pp = s->mtfbase[0]; uc = s->mtfa[pp + nn]; while (nn > 3) { int32_t z = pp + nn; s->mtfa[(z)] = s->mtfa[(z)-1]; s->mtfa[(z)-1] = s->mtfa[(z)-2]; s->mtfa[(z)-2] = s->mtfa[(z)-3]; s->mtfa[(z)-3] = s->mtfa[(z)-4]; nn -= 4; } while (nn > 0) { s->mtfa[(pp + nn)] = s->mtfa[(pp + nn) - 1]; nn--; }; s->mtfa[pp] = uc; } else { // general case int32_t lno = nn / MTFL_SIZE; int32_t off = nn % MTFL_SIZE; int32_t pp = s->mtfbase[lno] + off; uc = s->mtfa[pp]; while (pp > s->mtfbase[lno]) { s->mtfa[pp] = s->mtfa[pp - 1]; pp--; }; s->mtfbase[lno]++; while (lno > 0) { s->mtfbase[lno]--; s->mtfa[s->mtfbase[lno]] = s->mtfa[s->mtfbase[lno - 1] + MTFL_SIZE - 1]; lno--; } s->mtfbase[0]--; s->mtfa[s->mtfbase[0]] = uc; if (s->mtfbase[0] == 0) { int kk = MTFA_SIZE - 1; for (int ii = 256 / MTFL_SIZE - 1; ii >= 0; ii--) { 
for (int jj = MTFL_SIZE - 1; jj >= 0; jj--) { s->mtfa[kk] = s->mtfa[s->mtfbase[ii] + jj]; kk--; } s->mtfbase[ii] = kk + 1; } } } uc = s->seqToUnseq[uc]; s->unzftab[uc]++; s->tt[nblock++] = uc; } // Now we know what nblock is, we can do a better sanity check on s->origPtr. if (s->origPtr < 0 || s->origPtr >= nblock) return BZ_DATA_ERROR; // compute the T^(-1) vector { int32_t prev = s->unzftab[0]; s->unzftab[0] = 0; for (i = 1; i < 256; i++) { int32_t tmp = s->unzftab[i]; s->unzftab[i] = prev + s->unzftab[i - 1]; prev = tmp; } for (i = 0; i < (int)nblock; i++) { int uc = (s->tt[i] & 0xff); s->tt[s->unzftab[uc]] |= (i << 8); s->unzftab[uc]++; } } s->save_nblock = nblock; // Verify the end-of-block signature: should be followed by another block or an end-of-stream // signature { uint8_t const* save_cur = s->cur; uint64_t save_bitbuf = s->bitbuf; uint32_t save_bitpos = s->bitpos; sig0 = getbits(s, 24); sig1 = getbits(s, 24); if (sig0 == 0x31'4159 && sig1 == 0x26'5359) { // Start of another block: restore bitstream location s->cur = save_cur; s->bitbuf = save_bitbuf; s->bitpos = save_bitpos; return BZ_OK; } else if (sig0 == 0x17'7245 && sig1 == 0x38'5090) { // End-of-stream signature return BZ_STREAM_END; } else { return BZ_DATA_ERROR; } } } static void bzUnRLE(unbz_state_s* s) { uint8_t* out = s->out; uint8_t* outend = s->outend; int32_t rle_cnt = s->save_nblock; int cprev = -1; std::vector<uint32_t>& tt = s->tt; uint32_t pos = tt[s->origPtr] >> 8; int mask = ~0; s->nblock_used = rle_cnt + 1; while (rle_cnt > 0) { int c; rle_cnt--; pos = tt[pos]; c = (pos & 0xff); pos >>= 8; if (out < outend) { *out = c; } out++; mask = (mask * 2 + (c != cprev)) & 7; cprev = c; if (!mask) { int run; if (--rle_cnt < 0) { printf("run split across blocks! (unsupported)\n"); break; } pos = tt[pos]; run = (pos & 0xff); pos >>= 8; for (int i = 0; i < run; i++) { if (out + i < outend) out[i] = c; } out += run; cprev = -1; } } s->out = out; } int32_t cpu_bz2_uncompress( uint8_t const* source, size_t sourceLen, uint8_t* dest, size_t* destLen, uint64_t* block_start) { unbz_state_s s{}; uint32_t v; int ret; size_t last_valid_block_in, last_valid_block_out; if (dest == nullptr || destLen == nullptr || source == nullptr || sourceLen < 12) return BZ_PARAM_ERROR; s.currBlockNo = 0; s.cur = source; s.base = source; s.end = source + sourceLen - 4; // We will not read the final combined CRC (last 4 bytes of the file) s.bitbuf = __builtin_bswap64(*reinterpret_cast<uint64_t const*>(source)); s.bitpos = 0; s.out = dest; s.outend = dest + *destLen; s.outbase = dest; s.save_nblock = 0; v = getbits(&s, 24); if (v != (('B' << 16) | ('Z' << 8) | 'h')) return BZ_DATA_ERROR_MAGIC; v = getbits(&s, 8) - '0'; if (v < 1 || v > 9) return BZ_DATA_ERROR_MAGIC; s.blockSize100k = v; last_valid_block_in = 0; last_valid_block_out = 0; if (block_start) { uint64_t bit_offs = *block_start; if (bit_offs > 32) // 32-bits are used for the file header (0..32 is considered as first block) { s.cur = source + (size_t)(bit_offs >> 3); s.bitpos = (uint32_t)(bit_offs & 7); if (s.cur + 8 > s.end) return BZ_PARAM_ERROR; s.bitbuf = __builtin_bswap64(*reinterpret_cast<uint64_t const*>(s.cur)); } } s.tt.resize(s.blockSize100k * 100000); do { last_valid_block_in = ((s.cur - s.base) << 3) + (s.bitpos); last_valid_block_out = s.out - s.outbase; ret = bz2_decompress_block(&s); if (ret == BZ_OK || ret == BZ_STREAM_END) { bzUnRLE(&s); if (s.nblock_used != s.save_nblock + 1 || s.out > s.outend) { ret = (s.out < s.outend) ? 
BZ_UNEXPECTED_EOF : BZ_OUTBUFF_FULL;
      }
    }
  } while (ret == BZ_OK);
  if (ret == BZ_STREAM_END) {
    // normal termination
    last_valid_block_in  = ((s.cur - s.base) << 3) + (s.bitpos);
    last_valid_block_out = s.out - s.outbase;
    ret                  = BZ_OK;
  }
  *destLen = last_valid_block_out;
  if (block_start) { *block_start = last_valid_block_in; }
  return ret;
}

}  // namespace io
}  // namespace cudf
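// Editor's note: illustrative caller sketch, not part of the file above. A minimal
// one-shot use of cpu_bz2_uncompress when the uncompressed size is known up front;
// bz2_to_buffer is a hypothetical name, BZ_OK comes from the headers included above.
#include <cstddef>
#include <cstdint>
#include <vector>

bool bz2_to_buffer(uint8_t const* src, size_t src_len, std::vector<uint8_t>& dst)
{
  size_t dst_len       = dst.size();  // in: capacity; out: bytes actually written
  uint64_t block_start = 0;           // bit offset 0 => start at the "BZh" file header
  int const err =
    cudf::io::cpu_bz2_uncompress(src, src_len, dst.data(), &dst_len, &block_start);
  if (err != BZ_OK) { return false; }  // BZ_OUTBUFF_FULL would mean dst was too small
  dst.resize(dst_len);                 // shrink to the decoded size
  return true;
}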
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/debrotli.cu
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** @file debrotli.cu * * CUDA-based brotli decompression * * Brotli Compressed Data Format * https://tools.ietf.org/html/rfc7932 * * Portions of this file are derived from Google's Brotli project at * https://github.com/google/brotli, original license text below. */ /* Copyright 2013 Google Inc. All Rights Reserved. Distributed under MIT license. See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ /* Copyright(c) 2009, 2010, 2013 - 2016 by the Brotli Authors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include "brotli_dict.hpp" #include "gpuinflate.hpp" #include <io/utilities/block_utils.cuh> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> namespace cudf { namespace io { constexpr uint32_t huffman_lookup_table_width = 8; constexpr int8_t brotli_code_length_codes = 18; constexpr uint32_t brotli_num_distance_short_codes = 16; constexpr uint32_t brotli_max_allowed_distance = 0x7FFF'FFFC; constexpr int block_size = 256; template <typename T0, typename T1> inline __device__ uint16_t huffcode(T0 len, T1 sym) { return (uint16_t)(((sym) << 4) + (len)); } inline __device__ uint32_t brotli_distance_alphabet_size(uint8_t npostfix, uint32_t ndirect, uint32_t maxnbits) { return brotli_num_distance_short_codes + ndirect + (maxnbits << (npostfix + 1)); } inline __device__ uint32_t brev8(uint32_t x) { return (__brev(x) >> 24u); // kReverseBits[x] } #define CONSTANT static const __device__ __constant__ #include "brotli_tables.hpp" /* typeof(MODE) == ContextType; returns ContextLut */ __inline__ __device__ int brotli_context_lut(int mode) { return (mode << 9); } inline __device__ uint8_t brotli_transform_type(int idx) { return kTransformsData[(idx * 3) + 1]; } inline __device__ uint8_t const* brotli_transform_prefix(int idx) { return &kPrefixSuffix[kPrefixSuffixMap[kTransformsData[(idx * 3)]]]; } inline __device__ uint8_t const* brotli_transform_suffix(int idx) { return &kPrefixSuffix[kPrefixSuffixMap[kTransformsData[(idx * 3) + 2]]]; } /* typeof(LUT) == const uint8_t* */ __inline__ __device__ int brotli_need_context_lut(int mode) { return (mode < (4 << 9)); } __inline__ __device__ int brotli_context(int p1, int p2, int lut) { return kContextLookup[lut + p1] | kContextLookup[lut + p2 + 256]; } /** * @brief Various local scratch arrays */ struct huff_scratch_s { uint16_t code_length_histo[16]; uint8_t code_length_code_lengths[brotli_code_length_codes]; int8_t offset[6]; // offsets in sorted table for each length uint16_t lenvlctab[32]; uint16_t sorted[brotli_code_length_codes]; // symbols sorted by code length int16_t next_symbol[32]; uint16_t symbols_lists_array[720]; }; /** * @brief Contains a collection of Huffman trees with the same alphabet size. * max_symbol is needed due to simple codes since log2(alphabet_size) could be * greater than log2(max_symbol). 
*/ struct debrotli_huff_tree_group_s { uint16_t alphabet_size; uint16_t max_symbol; uint16_t num_htrees; uint16_t pad; uint16_t* htrees[1]; }; // Must be able to at least hold worst-case context maps, tree groups and context modes constexpr int local_heap_size = (256 * 64 + 256 * 4 + 3 * (sizeof(debrotli_huff_tree_group_s) + 255 * sizeof(uint16_t*)) + 256 + 3 * brotli_huffman_max_size_258 * sizeof(uint16_t) + 3 * brotli_huffman_max_size_26 * sizeof(uint16_t)); /** * Brotli decoder state */ struct debrotli_state_s { // Bitstream uint8_t const* cur; uint8_t const* end; uint8_t const* base; uint2 bitbuf; uint32_t bitpos; int32_t error; // Output uint8_t* outbase; uint8_t* out; size_t bytes_left; // Decoded symbols uint8_t window_bits; uint8_t is_last; uint8_t is_uncompressed; uint8_t distance_postfix_bits; uint8_t distance_postfix_mask; uint8_t mtf_upper_bound; uint8_t p1; uint8_t p2; int32_t max_backward_distance; uint32_t num_block_types[3]; uint32_t block_length[3]; uint32_t num_direct_distance_codes; uint32_t meta_block_len; uint16_t heap_used; uint16_t heap_limit; uint8_t* context_map; uint8_t* dist_context_map; uint8_t* context_modes; uint8_t* fb_base; uint32_t fb_size; uint8_t block_type_rb[6]; uint8_t pad[2]; int dist_rb_idx; int dist_rb[4]; debrotli_huff_tree_group_s* literal_hgroup; debrotli_huff_tree_group_s* insert_copy_hgroup; debrotli_huff_tree_group_s* distance_hgroup; uint16_t* block_type_vlc[3]; huff_scratch_s hs; uint32_t mtf[65]; __align__(8) char heap[local_heap_size]; }; inline __device__ uint32_t Log2Floor(uint32_t value) { return 32 - __clz(value); } /// @brief initializes the bit reader __device__ void initbits(debrotli_state_s* s, uint8_t const* base, size_t len, size_t pos = 0) { uint8_t const* p = base + pos; auto prefix_bytes = (uint32_t)(((size_t)p) & 3); p -= prefix_bytes; s->base = base; s->end = base + len; s->cur = p; s->bitbuf.x = (p < s->end) ? *reinterpret_cast<uint32_t const*>(p) : 0; p += 4; s->bitbuf.y = (p < s->end) ? *reinterpret_cast<uint32_t const*>(p) : 0; s->bitpos = prefix_bytes * 8; } // return next 32 bits inline __device__ uint32_t next32bits(debrotli_state_s const* s) { return __funnelshift_rc(s->bitbuf.x, s->bitbuf.y, s->bitpos); } /// return next n bits inline __device__ uint32_t showbits(debrotli_state_s const* s, uint32_t n) { uint32_t next32 = __funnelshift_rc(s->bitbuf.x, s->bitbuf.y, s->bitpos); return (next32 & ((1 << n) - 1)); } inline __device__ void skipbits(debrotli_state_s* s, uint32_t n) { uint32_t bitpos = s->bitpos + n; if (bitpos >= 32) { uint8_t const* cur = s->cur + 8; s->bitbuf.x = s->bitbuf.y; s->bitbuf.y = (cur < s->end) ? 
*reinterpret_cast<uint32_t const*>(cur) : 0; s->cur = cur - 4; bitpos &= 0x1f; } s->bitpos = bitpos; } inline __device__ uint32_t getbits(debrotli_state_s* s, uint32_t n) { uint32_t bits = showbits(s, n); skipbits(s, n); return bits; } inline __device__ uint32_t getbits_bytealign(debrotli_state_s* s) { auto n = (uint32_t)((-(int32_t)s->bitpos) & 7); uint32_t bits = showbits(s, n); skipbits(s, n); return bits; } /** @brief Variable-length coding for 8-bit variable (1..11 bits) * encoded with the following variable-length code (as it appears in the * compressed data, where the bits are parsed from right to left, * so 0110111 has the value 12): * * Value Bit Pattern * ----- ----------- * 1 0 * 2 0001 * 3..4 x0011 * 5..8 xx0101 * 9..16 xxx0111 * 17..32 xxxx1001 * 33..64 xxxxx1011 * 65..128 xxxxxx1101 * 129..256 xxxxxxx1111 */ static __device__ uint32_t getbits_u8vlc(debrotli_state_s* s) { uint32_t next32 = next32bits(s); uint32_t v, len; if (!(next32 & 1)) { v = 0; len = 1; } else { len = (next32 >> 1) & 7; v = (1 << len) + ((next32 >> 4) & ((1 << len) - 1)); len += 4; } skipbits(s, len); return v; } /// Decode a Huffman code with 8-bit initial lookup static __device__ uint32_t getvlc(debrotli_state_s* s, uint16_t const* lut) { uint32_t next32 = next32bits(s); uint32_t vlc, len; lut += next32 & ((1 << huffman_lookup_table_width) - 1); vlc = lut[0]; len = vlc & 0x0f; vlc >>= 4; if (len > huffman_lookup_table_width) { len -= huffman_lookup_table_width; lut += vlc + ((next32 >> huffman_lookup_table_width) & ((1 << len) - 1)); vlc = lut[0]; len = huffman_lookup_table_width + (vlc & 0xf); vlc >>= 4; } skipbits(s, len); return vlc; } static auto __device__ allocation_size(uint32_t bytes) { return (bytes + 7) & ~7; } /// Alloc bytes from the local (shared mem) heap static __device__ uint8_t* local_alloc(debrotli_state_s* s, uint32_t bytes) { int heap_used = s->heap_used; auto const len = allocation_size(bytes); if (heap_used + len <= s->heap_limit) { auto* ptr = reinterpret_cast<uint8_t*>(&s->heap[heap_used]); s->heap_used = (uint16_t)(heap_used + len); return ptr; } else { return nullptr; } } /// Shrink the size of the local heap, returns ptr to end (used for stack-like intermediate /// allocations at the end of the heap) static __device__ uint8_t* local_heap_shrink(debrotli_state_s* s, uint32_t bytes) { int heap_used = s->heap_used; int heap_limit = s->heap_limit; auto const len = allocation_size(bytes); if (heap_limit - len >= heap_used) { heap_limit -= len; s->heap_limit = (uint16_t)heap_limit; return reinterpret_cast<uint8_t*>(&s->heap[heap_limit]); } else { return nullptr; } } static __device__ void local_heap_grow(debrotli_state_s* s, uint32_t bytes) { auto const len = allocation_size(bytes); int heap_limit = s->heap_limit + len; s->heap_limit = (uint16_t)heap_limit; } /// Alloc memory from the fixed-size heap shared between all blocks (thread0-only) static __device__ uint8_t* ext_heap_alloc(uint32_t bytes, uint8_t* ext_heap_base, uint32_t ext_heap_size) { uint32_t len = (bytes + 0xf) & ~0xf; volatile auto* heap_ptr = reinterpret_cast<volatile uint32_t*>(ext_heap_base); uint32_t first_free_block = ~0; for (;;) { uint32_t blk_next, blk_prev; first_free_block = atomicExch((unsigned int*)heap_ptr, first_free_block); if (first_free_block == ~0 || first_free_block >= ext_heap_size) { // Some other block is holding the heap or there are no free blocks: try again later continue; } if (first_free_block == 0) { // Heap is uninitialized first_free_block = 4 * sizeof(uint32_t); heap_ptr[4] = 
ext_heap_size; heap_ptr[5] = ext_heap_size - first_free_block; __threadfence(); continue; } blk_prev = 0; blk_next = first_free_block; do { uint32_t next, blksz; if (((blk_next & 3) != 0) || (blk_next >= ext_heap_size)) { // Corrupted heap atomicExch((unsigned int*)heap_ptr, first_free_block); return nullptr; } next = heap_ptr[(blk_next >> 2) + 0]; blksz = heap_ptr[(blk_next >> 2) + 1]; if (blksz >= len) { uint32_t blk_new = blk_next + len; uint32_t sz_new = blksz - len; if (sz_new >= 16) { // Reduce the size of the current block if (blk_prev == 0) first_free_block = blk_new; else heap_ptr[(blk_prev >> 2) + 0] = blk_new; heap_ptr[(blk_new >> 2) + 0] = next; heap_ptr[(blk_new >> 2) + 1] = sz_new; } else { // Re-use this block if (blk_prev == 0) first_free_block = next; else heap_ptr[(blk_prev >> 2) + 0] = next; } __threadfence(); // Restore the list head atomicExch((unsigned int*)heap_ptr, first_free_block); return ext_heap_base + blk_next; } else { blk_prev = blk_next; blk_next = next; } } while (blk_next != 0 && blk_next < ext_heap_size); first_free_block = atomicExch((unsigned int*)heap_ptr, first_free_block); // Reaching here means the heap is full // Just in case we're trying to allocate more than the entire heap if (len > ext_heap_size - 4 * sizeof(uint32_t)) { break; } } return nullptr; } /// Free a memory block (thread0-only) static __device__ void ext_heap_free(void* ptr, uint32_t bytes, uint8_t* ext_heap_base, uint32_t ext_heap_size) { uint32_t len = (bytes + 0xf) & ~0xf; volatile auto* heap_ptr = (volatile uint32_t*)ext_heap_base; uint32_t first_free_block = ~0; auto cur_blk = static_cast<uint32_t>(static_cast<uint8_t*>(ptr) - ext_heap_base); for (;;) { first_free_block = atomicExch((unsigned int*)heap_ptr, first_free_block); if (first_free_block != ~0) { break; } // Some other block is holding the heap } if (first_free_block >= ext_heap_size) { // Heap is currently empty first_free_block = cur_blk; heap_ptr[(cur_blk >> 2) + 0] = first_free_block; heap_ptr[(cur_blk >> 2) + 1] = len; } else { uint32_t blk_prev = 0; uint32_t blk_next = first_free_block; for (;;) { uint32_t next = heap_ptr[(blk_next >> 2) + 0]; uint32_t blksz = heap_ptr[(blk_next >> 2) + 1]; if (cur_blk + len < blk_next) { // Insert new block heap_ptr[(cur_blk >> 2) + 0] = blk_next; heap_ptr[(cur_blk >> 2) + 1] = len; if (blk_prev == 0) { first_free_block = cur_blk; } else if (blk_prev + heap_ptr[(blk_prev >> 2) + 1] == cur_blk) { // Merge with previous block heap_ptr[(blk_prev >> 2) + 1] = heap_ptr[(blk_prev >> 2) + 1] + len; } else { heap_ptr[(blk_prev >> 2) + 0] = cur_blk; } break; } else if (cur_blk + len == blk_next) { // Merge with next block heap_ptr[(cur_blk >> 2) + 0] = next; heap_ptr[(cur_blk >> 2) + 1] = len + blksz; if (blk_prev == 0) { first_free_block = cur_blk; } else if (blk_prev + heap_ptr[(blk_prev >> 2) + 1] == cur_blk) { // Also merge with previous block heap_ptr[(blk_prev >> 2) + 0] = next; heap_ptr[(blk_prev >> 2) + 1] = heap_ptr[(blk_prev >> 2) + 1] + len + blksz; } else { heap_ptr[(blk_prev >> 2) + 0] = cur_blk; } break; } else if (next < ext_heap_size) { // Move to the next block blk_prev = blk_next; blk_next = next; } else // Insert this block at the tail { heap_ptr[(cur_blk >> 2) + 0] = next; heap_ptr[(cur_blk >> 2) + 1] = len; if (blk_next + blksz == cur_blk) { // Merge with last block heap_ptr[(blk_next >> 2) + 1] = heap_ptr[(blk_next >> 2) + 1] + len; } else { heap_ptr[(blk_next >> 2) + 0] = cur_blk; } break; } } } __threadfence(); atomicExch((unsigned int*)heap_ptr, first_free_block); 
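// (Editor's note: the atomicExch above publishes the updated free-list head; the
// head word doubles as a spin lock, with ~0 marking it held by another block.)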
} static __device__ uint32_t BuildSimpleHuffmanTable(uint16_t* lut, int root_bits, uint16_t* val, uint32_t num_symbols) { uint32_t table_size = 1; uint32_t const goal_size = 1U << root_bits; switch (num_symbols) { case 0: lut[0] = huffcode(0, val[0]); break; case 1: if (val[1] > val[0]) { lut[0] = huffcode(1, val[0]); lut[1] = huffcode(1, val[1]); } else { lut[0] = huffcode(1, val[1]); lut[1] = huffcode(1, val[0]); } table_size = 2; break; case 2: lut[0] = huffcode(1, val[0]); lut[2] = huffcode(1, val[0]); if (val[2] > val[1]) { lut[1] = huffcode(2, val[1]); lut[3] = huffcode(2, val[2]); } else { lut[1] = huffcode(2, val[2]); lut[3] = huffcode(2, val[1]); } table_size = 4; break; case 3: { int i, k; for (i = 0; i < 3; ++i) { for (k = i + 1; k < 4; ++k) { if (val[k] < val[i]) { uint16_t t = val[k]; val[k] = val[i]; val[i] = t; } } } lut[0] = huffcode(2, val[0]); lut[2] = huffcode(2, val[1]); lut[1] = huffcode(2, val[2]); lut[3] = huffcode(2, val[3]); table_size = 4; break; } case 4: { if (val[3] < val[2]) { uint16_t t = val[3]; val[3] = val[2]; val[2] = t; } lut[0] = huffcode(1, val[0]); lut[1] = huffcode(2, val[1]); lut[2] = huffcode(1, val[0]); lut[3] = huffcode(3, val[2]); lut[4] = huffcode(1, val[0]); lut[5] = huffcode(2, val[1]); lut[6] = huffcode(1, val[0]); lut[7] = huffcode(3, val[3]); table_size = 8; break; } } while (table_size != goal_size) { memcpy(&lut[table_size], &lut[0], table_size * sizeof(lut[0])); table_size <<= 1; } return goal_size; } static __device__ void BuildCodeLengthsHuffmanTable(huff_scratch_s* hs) { uint32_t code; // current table entry int symbol; // symbol index in original or sorted table int key; // prefix code int key_step; // prefix code addend int step; // step size to replicate values in current table int table_size; // size of current table int bits; // Generate offsets into sorted symbol table by code length. symbol = -1; for (bits = 1; bits <= 5; bits++) { symbol += hs->code_length_histo[bits]; hs->offset[bits] = (int8_t)symbol; } // Symbols with code length 0 are placed after all other symbols. hs->offset[0] = brotli_code_length_codes - 1; // Sort symbols by length, by symbol order within each length. symbol = brotli_code_length_codes; do { symbol--; hs->sorted[hs->offset[hs->code_length_code_lengths[symbol]]--] = symbol; } while (symbol != 0); table_size = 1 << 5; // Special case: all symbols but one have 0 code length. if (hs->offset[0] == 0) { code = huffcode(0, hs->sorted[0]); for (key = 0; key < table_size; ++key) { hs->lenvlctab[key] = code; } return; } // Fill in table. key = 0; key_step = 1 << 7; symbol = 0; bits = 1; step = 2; do { for (int bits_count = hs->code_length_histo[bits]; bits_count != 0; --bits_count) { int end = table_size; code = huffcode(bits, hs->sorted[symbol++]); uint16_t* p = &hs->lenvlctab[brev8(key)]; do { end -= step; p[end] = code; } while (end > 0); key += key_step; } step <<= 1; key_step >>= 1; } while (++bits <= 5); } // Returns the table width of the next 2nd level table. |count| is the histogram // of bit lengths for the remaining symbols, |len| is the code length of the // next processed symbol. 
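// Example (editor's addition): with root_bits == 8, count[9] == 1 and count[10] == 2,
// `left` starts at 2, drops to 1 after len 9, doubles to 2 and reaches 0 after len 10,
// so the function returns 10 - 8 = 2, i.e. a 4-entry second-level table.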
static __device__ int NextTableBitSize(uint16_t const* const count, int len, int root_bits) { int left = 1 << (len - root_bits); while (len < 15) { left -= count[len]; if (left <= 0) break; ++len; left <<= 1; } return len - root_bits; } // Build a huffman lookup table (currently thread0-only) static __device__ uint32_t BuildHuffmanTable(uint16_t* root_lut, int root_bits, uint16_t const* const symbol_lists, uint16_t* count) { uint32_t code; // current table entry uint16_t* lut; // next available space in table int len; // current code length int symbol; // symbol index in original or sorted table int key; // prefix code int key_step; // prefix code addend int sub_key; // 2nd level table prefix code int sub_key_step; // 2nd level table prefix code addend int step; // step size to replicate values in current table int table_bits; // key length of current table int table_size; // size of current table int total_size; // sum of root table size and 2nd level table sizes int max_length = -1; int bits; while (symbol_lists[max_length] == 0xFFFF) max_length--; max_length += 16; lut = root_lut; table_bits = root_bits; table_size = 1 << table_bits; total_size = table_size; // Fill in the root table. Reduce the table size to if possible, and create the repetitions by // memcpy. if (table_bits > max_length) { table_bits = max_length; table_size = 1 << table_bits; } key = 0; key_step = 1 << 7; bits = 1; step = 2; do { symbol = bits - 16; for (int bits_count = count[bits]; bits_count != 0; --bits_count) { symbol = symbol_lists[symbol]; code = huffcode(bits, symbol); uint16_t* p = &lut[brev8(key)]; int end = table_size; do { end -= step; p[end] = code; } while (end > 0); key += key_step; } step <<= 1; key_step >>= 1; } while (++bits <= table_bits); // If root_bits != table_bits then replicate to fill the remaining slots. while (total_size != table_size) { memcpy(&lut[table_size], &lut[0], table_size * sizeof(lut[0])); table_size <<= 1; } // Fill in 2nd level tables and add pointers to root table. key_step = (1 << 7) >> (root_bits - 1); sub_key = (1 << 8); sub_key_step = (1 << 7); for (len = root_bits + 1, step = 2; len <= max_length; ++len) { symbol = len - 16; for (; count[len] != 0; --count[len]) { if (sub_key == (1 << 8)) { lut += table_size; table_bits = NextTableBitSize(count, len, root_bits); table_size = 1 << table_bits; total_size += table_size; sub_key = brev8(key); key += key_step; root_lut[sub_key] = huffcode(table_bits + root_bits, (((size_t)(lut - root_lut)) - sub_key)); sub_key = 0; } symbol = symbol_lists[symbol]; code = huffcode(len - root_bits, symbol); uint16_t* p = &lut[brev8(sub_key)]; int end = table_size; do { end -= step; p[end] = code; } while (end > 0); sub_key += sub_key_step; } step <<= 1; sub_key_step >>= 1; } return (uint32_t)total_size; } /** 3.4. Simple Prefix Codes The first two bits of the compressed representation of each prefix code distinguish between simple and complex prefix codes. If this value is 1, then a simple prefix code follows as described in this section. Otherwise, a complex prefix code follows as described in Section 3.5. A simple prefix code can have up to four symbols with non-zero code length. 
The format of the simple prefix code is as follows: 2 bits: value of 1 indicates a simple prefix code 2 bits: NSYM - 1, where NSYM = number of symbols coded NSYM symbols, each encoded using ALPHABET_BITS bits 1 bit: tree-select, present only for NSYM = 4 The value of ALPHABET_BITS depends on the alphabet of the prefix code: it is the smallest number of bits that can represent all symbols in the alphabet. For example, for the alphabet of literal bytes, ALPHABET_BITS is 8. The value of each of the NSYM symbols above is the value of the ALPHABET_BITS width integer value. If the integer value is greater than or equal to the alphabet size, or the value is identical to a previous value, then the stream should be rejected as invalid. Note that the NSYM symbols may not be presented in sorted order. Prefix codes of the same bit length must be assigned to the symbols in sorted order. The (non-zero) code lengths of the symbols can be reconstructed as follows: * if NSYM = 1, the code length for the one symbol is zero -- when encoding this symbol in the compressed data stream using this prefix code, no actual bits are emitted. Similarly, when decoding a symbol using this prefix code, no bits are read and the one symbol is returned. * if NSYM = 2, both symbols have code length 1. * if NSYM = 3, the code lengths for the symbols are 1, 2, 2 in the order they appear in the representation of the simple prefix code. * if NSYM = 4, the code lengths (in order of symbols decoded) depend on the tree-select bit: 2, 2, 2, 2 (tree-select bit 0), or 1, 2, 3, 3 (tree-select bit 1). 3.5. Complex Prefix Codes A complex prefix code is a canonical prefix code, defined by the sequence of code lengths, as discussed in Section 3.2. For even greater compactness, the code length sequences themselves are compressed using a prefix code. The alphabet for code lengths is as follows: 0..15: Represent code lengths of 0..15 16: Copy the previous non-zero code length 3..6 times. The next 2 bits indicate repeat length (0 = 3, ... , 3 = 6) If this is the first code length, or all previous code lengths are zero, a code length of 8 is repeated 3..6 times. A repeated code length code of 16 modifies the repeat count of the previous one as follows: repeat count = (4 * (repeat count - 2)) + (3..6 on the next 2 bits) Example: Codes 7, 16 (+2 bits 11), 16 (+2 bits 10) will expand to 22 code lengths of 7 (1 + 4 * (6 - 2) + 5) 17: Repeat a code length of 0 for 3..10 times. The next 3 bits indicate repeat length (0 = 3, ... , 7 = 10) A repeated code length code of 17 modifies the repeat count of the previous one as follows: repeat count = (8 * (repeat count - 2)) + (3..10 on the next 3 bits) Note that a code of 16 that follows an immediately preceding 16 modifies the previous repeat count, which becomes the new repeat count. The same is true for a 17 following a 17. A sequence of three or more 16 codes in a row or three of more 17 codes in a row is possible, modifying the count each time. Only the final repeat count is used. The modification only applies if the same code follows. A 16 repeat does not modify an immediately preceding 17 count nor vice versa. A code length of 0 indicates that the corresponding symbol in the alphabet will not occur in the compressed data, and it should not participate in the prefix code construction algorithm given earlier. A complex prefix code must have at least two non-zero code lengths. 
The bit lengths of the prefix code over the code length alphabet are compressed with the following variable-length code (as it appears in the compressed data, where the bits are parsed from right to left): Symbol Code ------ ---- 0 00 1 0111 2 011 3 10 4 01 5 1111 We can now define the format of the complex prefix code as follows: o 2 bits: HSKIP, the number of skipped code lengths, can have values of 0, 2, or 3. The skipped lengths are taken to be zero. (An HSKIP of 1 indicates a Simple prefix code.) o Code lengths for symbols in the code length alphabet given just above, in the order: 1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15. If HSKIP is 2, then the code lengths for symbols 1 and 2 are zero, and the first code length is for symbol 3. If HSKIP is 3, then the code length for symbol 3 is also zero, and the first code length is for symbol 4. The code lengths of code length symbols are between 0 and 5, and they are represented with 2..4 bits according to the variable- length code above. A code length of 0 means the corresponding code length symbol is not used. If HSKIP is 2 or 3, a respective number of leading code lengths are implicit zeros and are not present in the code length sequence above. If there are at least two non-zero code lengths, any trailing zero code lengths are omitted, i.e., the last code length in the sequence must be non-zero. In this case, the sum of (32 >> code length) over all the non-zero code lengths must equal to 32. If the lengths have been read for the entire code length alphabet and there was only one non-zero code length, then the prefix code has one symbol whose code has zero length. In this case, that symbol results in no bits being emitted by the compressor and no bits consumed by the decompressor. That single symbol is immediately returned when this code is decoded. An example of where this occurs is if the entire code to be represented has symbols of length 8. For example, a literal code that represents all literal values with equal probability. In this case the single symbol is 16, which repeats the previous length. The previous length is taken to be 8 before any code length code lengths are read. o Sequence of code length symbols, which is at most the size of the alphabet, encoded using the code length prefix code. Any trailing 0 or 17 must be omitted, i.e., the last encoded code length symbol must be between 1 and 16. The sum of (32768 >> code length) over all the non-zero code lengths in the alphabet, including those encoded using repeat code(s) of 16, must be equal to 32768. If the number of times to repeat the previous length or repeat a zero length would result in more lengths in total than the number of symbols in the alphabet, then the stream should be rejected as invalid. 
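   Worked example (editor's addition): for a code whose non-zero lengths
   are {1, 2, 3, 3}, the sum is (32768 >> 1) + (32768 >> 2) + (32768 >> 3)
   + (32768 >> 3) = 16384 + 8192 + 4096 + 4096 = 32768, so the code is
   complete and accepted; dropping one length-3 symbol leaves 28672 and
   the stream must be rejected as invalid.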
*/ // Decode Huffman tree (thread0-only) static __device__ uint32_t DecodeHuffmanTree(debrotli_state_s* s, uint32_t alphabet_size, uint32_t max_symbol, uint16_t* vlctab) { uint32_t prefix_code_type; prefix_code_type = getbits(s, 2); if (prefix_code_type == 1) { // Simple prefix Code uint32_t nsym = getbits(s, 2); uint32_t alphabet_bits = Log2Floor(alphabet_size - 1); for (uint32_t n = 0; n <= nsym; n++) { uint32_t v = getbits(s, alphabet_bits); if (v >= max_symbol) { s->error = -1; return 0; } for (uint32_t i = 0; i < n; i++) { if (v == s->hs.symbols_lists_array[i]) { s->error = -1; // Duplicate symbol return 0; } } s->hs.symbols_lists_array[n] = (uint16_t)v; } if (nsym == 3) { nsym += getbits(s, 1); // tree_select; } return BuildSimpleHuffmanTable( vlctab, huffman_lookup_table_width, s->hs.symbols_lists_array, nsym); } else { // Complex prefix code huff_scratch_s* const hs = &s->hs; uint16_t* symbol_lists = &s->hs.symbols_lists_array[16]; // Make small negative indexes addressable uint32_t space = 32, num_codes = 0, i, prev_code_len, symbol, repeat, repeat_code_len; memset(&hs->code_length_histo[0], 0, 6 * sizeof(hs->code_length_histo)); memset(&hs->code_length_code_lengths[0], 0, sizeof(hs->code_length_code_lengths)); for (i = prefix_code_type; i < 18; i++) { uint8_t const code_len_idx = kCodeLengthCodeOrder[i]; uint32_t ix, v; ix = showbits(s, 4); v = kCodeLengthPrefixValue[ix]; skipbits(s, kCodeLengthPrefixLength[ix]); hs->code_length_code_lengths[code_len_idx] = (uint8_t)v; if (v != 0) { space = space - (32u >> v); ++num_codes; ++hs->code_length_histo[v]; if (space - 1u >= 32u) { // space is 0 or wrapped around. break; } } } if (!(num_codes == 1 || space == 0)) { s->error = -1; return 0; } BuildCodeLengthsHuffmanTable(&s->hs); for (i = 0; i <= 15; ++i) { hs->code_length_histo[i] = 0; hs->next_symbol[i] = (int16_t)(i - 16); symbol_lists[hs->next_symbol[i]] = 0xFFFF; } symbol = 0; prev_code_len = 8; repeat = 0; repeat_code_len = 0; space = 32768; while (symbol < max_symbol && space > 0) { uint32_t next32 = next32bits(s); uint32_t code_len = hs->lenvlctab[next32 & 0x1f]; uint32_t vlc_len = code_len & 0xf; // Use 1..5 bits code_len >>= 4; // code_len = 0..17 if (code_len < 16) { // Process single decoded symbol code length : // A) reset the repeat variable // B) remember code length(if it is not 0) // C) extend corresponding index - chain // D) reduce the Huffman space // E) update the histogram repeat = 0; if (code_len != 0) { // code_len == 1..15 symbol_lists[hs->next_symbol[code_len]] = (uint16_t)symbol; hs->next_symbol[code_len] = (int)symbol; prev_code_len = code_len; space -= 32768u >> code_len; hs->code_length_histo[code_len]++; } symbol++; } else { // Process repeated symbol code length. 
// A) Check if it is the extension of previous repeat sequence; if the decoded value is not // 16, then it is a new symbol-skip B) Update repeat variable C) Check if operation is // feasible (fits alphabet) D) For each symbol do the same operations as in single symbol uint32_t extra_bits, repeat_delta, new_len, old_repeat; if (code_len == 16) { extra_bits = 2; repeat_delta = (next32 >> vlc_len) & 3; new_len = prev_code_len; } else { extra_bits = 3; repeat_delta = (next32 >> vlc_len) & 7; new_len = 0; } vlc_len += extra_bits; if (repeat_code_len != new_len) { repeat = 0; repeat_code_len = new_len; } old_repeat = repeat; if (repeat > 0) { repeat = (repeat - 2) << extra_bits; } repeat += repeat_delta + 3u; repeat_delta = repeat - old_repeat; if (symbol + repeat_delta > max_symbol) { s->error = -1; return 0; } if (repeat_code_len != 0) { uint32_t last = symbol + repeat_delta; int next = hs->next_symbol[repeat_code_len]; do { symbol_lists[next] = (uint16_t)symbol; next = (int)symbol; } while (++symbol != last); hs->next_symbol[repeat_code_len] = next; space -= repeat_delta << (15 - repeat_code_len); hs->code_length_histo[repeat_code_len] = (uint16_t)(hs->code_length_histo[repeat_code_len] + repeat_delta); } else { symbol += repeat_delta; } } skipbits(s, vlc_len); } if (space != 0) { s->error = -1; return 0; } return BuildHuffmanTable( vlctab, huffman_lookup_table_width, symbol_lists, hs->code_length_histo); } } /** 9.1. Format of the Stream Header The stream header has only the following one field: 1..7 bits: WBITS, a value in the range 10..24, encoded with the following variable - length code(as it appears in the compressed data, where the bits are parsed from right to left) : Value Bit Pattern ---- - ---------- - 10 0100001 11 0110001 12 1000001 13 1010001 14 1100001 15 1110001 16 0 17 0000001 18 0011 19 0101 20 0111 21 1001 22 1011 23 1101 24 1111 Note that bit pattern 0010001 is invalid and must not be used. The size of the sliding window, which is the maximum value of any non - dictionary reference backward distance, is given by the following formula : window size = (1 << WBITS) - 16 */ static __device__ void DecodeStreamHeader(debrotli_state_s* s) { uint32_t next32 = next32bits(s); uint32_t wbits = 0, len = 0; if ((next32 & 1) == 0) { // 0 wbits = 16; len = 1; } else { uint32_t n = (next32 >> 1) & 7; if (n != 0) { // xxx1 wbits = 17 + n; len = 4; } else { n = (next32 >> 4) & 7; if (n != 1) { wbits = (n) ? 
8 + n : 17; // xxx0001 len = 7; } else { // Large window (not supported) or invalid, bail s->error = -1; } } } s->window_bits = (uint8_t)wbits; s->max_backward_distance = (1 << s->window_bits) - 16; skipbits(s, len); } /** 9.2.Format of the Meta - Block Header A compliant compressed data set has at least one meta - block.Each meta - block contains a header with information about the uncompressed length of the meta - block, and a bit signaling if the meta - block is the last one.The format of the meta - block header is the following : 1 bit : ISLAST, set to 1 if this is the last meta - block 1 bit : ISLASTEMPTY, if set to 1, the meta - block is empty; this field is only present if ISLAST bit is set-- if it is 1, then the meta - block and the brotli stream ends at that bit, with any remaining bits in the last byte of the compressed stream filled with zeros(if the fill bits are not zero, then the stream should be rejected as invalid) 2 bits: MNIBBLES, number of nibbles to represent the uncompressed length, encoded with the following fixed - length code : Value Bit Pattern ---- - ---------- - 0 11 4 00 5 01 6 10 If MNIBBLES is 0, the meta - block is empty, i.e., it does not generate any uncompressed data.In this case, the rest of the meta - block has the following format : 1 bit : reserved, must be zero 2 bits : MSKIPBYTES, number of bytes to represent metadata length MSKIPBYTES * 8 bits : MSKIPLEN - 1, where MSKIPLEN is the number of metadata bytes; this field is only present if MSKIPBYTES is positive; otherwise, MSKIPLEN is 0 (if MSKIPBYTES is greater than 1, and the last byte is all zeros, then the stream should be rejected as invalid) 0..7 bits: fill bits until the next byte boundary, must be all zeros MSKIPLEN bytes of metadata, not part of the uncompressed data or the sliding window MNIBBLES * 4 bits: MLEN - 1, where MLEN is the length of the meta - block uncompressed data in bytes(if MNIBBLES is greater than 4, and the last nibble is all zeros, then the stream should be rejected as invalid) 1 bit : ISUNCOMPRESSED, if set to 1, any bits of compressed data up to the next byte boundary are ignored, and the rest of the meta - block contains MLEN bytes of literal data; this field is only present if the ISLAST bit is not set(if the ignored bits are not all zeros, the stream should be rejected as invalid) */ static __device__ void DecodeMetaBlockHeader(debrotli_state_s* s) { uint32_t next32 = next32bits(s); uint32_t len = 1, is_empty = 0; s->is_last = (uint8_t)(next32 & 1); if (s->is_last) { is_empty = (uint8_t)((next32 >> 1) & 1); len++; } s->meta_block_len = 0; s->is_uncompressed = 0; if (!is_empty) { uint32_t mnibbles = 4 + ((next32 >> len) & 3); len += 2; if (mnibbles < 7) { s->meta_block_len = 1 + ((next32 >> len) & ((1u << (mnibbles * 4)) - 1)); len += mnibbles * 4; if (mnibbles > 4u && s->meta_block_len <= (1u << (mnibbles * 4 - 4))) { s->error = -1; } if (!s->is_last) { s->is_uncompressed = (uint8_t)((next32 >> len) & 1); len++; } } else { uint32_t reserved, mskipbytes, mskiplen; reserved = (next32 >> len) & 1; if (reserved != 0) { s->error = -1; } len += 1; mskipbytes = (next32 >> len) & 3; len += 2; if (mskipbytes > 0) { mskiplen = 1 + (next32 >> len) & ((1u << (mskipbytes * 8)) - 1); if (mskiplen <= ((1u << (mskipbytes * 8)) >> 8)) { s->error = -1; // Last byte is all zeros } len += mskipbytes * 8; } else { mskiplen = 0; } skipbits(s, len); if (getbits_bytealign(s) != 0) { s->error = 1; } for (len = mskiplen; len >= 32; len -= 32) { skipbits(s, 32); } } } skipbits(s, len); } /** 
1..11 bits: NBLTYPESL, number of literal block types Prefix code over the block type code alphabet for literal block types, appears only if NBLTYPESL >= 2 Prefix code over the block count code alphabet for literal block counts, appears only if NBLTYPESL >= 2 Block count code + extra bits for first literal block count, appears only if NBLTYPESL >= 2 1..11 bits: NBLTYPESI, number of insert-and-copy block types, encoded with the same variable-length code as above Prefix code over the block type code alphabet for insert-and- copy block types, appears only if NBLTYPESI >= 2 Prefix code over the block count code alphabet for insert-and- copy block counts, appears only if NBLTYPESI >= 2 Block count code + extra bits for first insert-and-copy block count, appears only if NBLTYPESI >= 2 1..11 bits: NBLTYPESD, number of distance block types, encoded with the same variable-length code as above Prefix code over the block type code alphabet for distance block types, appears only if NBLTYPESD >= 2 Prefix code over the block count code alphabet for distance block counts, appears only if NBLTYPESD >= 2 Block count code + extra bits for first distance block count, appears only if NBLTYPESD >= 2 */ static __device__ void DecodeHuffmanTables(debrotli_state_s* s) { for (int b = 0; b < 3; b++) { uint32_t nbltypes = 1 + getbits_u8vlc(s); s->num_block_types[b] = nbltypes; if (nbltypes >= 2) { uint32_t alphabet_size = nbltypes + 2, index, nbits, maxtblsz; uint16_t* vlctab; maxtblsz = kMaxHuffmanTableSize[(alphabet_size + 31) >> 5]; maxtblsz = (maxtblsz > brotli_huffman_max_size_258) ? brotli_huffman_max_size_258 : maxtblsz; vlctab = reinterpret_cast<uint16_t*>( local_alloc(s, (brotli_huffman_max_size_26 + maxtblsz) * sizeof(uint16_t))); s->block_type_vlc[b] = vlctab; DecodeHuffmanTree(s, alphabet_size, alphabet_size, vlctab + brotli_huffman_max_size_26); alphabet_size = 26; DecodeHuffmanTree(s, alphabet_size, alphabet_size, vlctab); if (s->error) { break; } index = getvlc(s, vlctab); nbits = kBlockLengthPrefixCodeBits[index]; // nbits == 2..24 s->block_length[b] = kBlockLengthPrefixCodeOffset[index] + getbits(s, nbits); } else { s->block_length[b] = 1 << 24; } } s->block_type_rb[0] = 1; s->block_type_rb[1] = 0; s->block_type_rb[2] = 1; s->block_type_rb[3] = 0; s->block_type_rb[4] = 1; s->block_type_rb[5] = 0; } /** @brief Transform: * 1) initialize list L with values 0, 1,... 255 * 2) For each input element X: * 2.1) let Y = L[X] * 2.2) remove X-th element from L * 2.3) prepend Y to L * 2.4) append Y to output * * In most cases max(Y) <= 7, so most of L remains intact. * To reduce the cost of initialization, we reuse L, remember the upper bound * of Y values, and reinitialize only first elements in L. * * Most of input values are 0 and 1. To reduce number of branches, we replace * inner for loop with do-while. */ static __device__ void InverseMoveToFrontTransform(debrotli_state_s* s, uint8_t* v, uint32_t v_len) { // Reinitialize elements that could have been changed. uint32_t i = 1; uint32_t upper_bound = s->mtf_upper_bound; uint32_t* mtf = &s->mtf[1]; // Make mtf[-1] addressable. auto* mtf_u8 = reinterpret_cast<uint8_t*>(mtf); uint32_t pattern = 0x0302'0100; // Little-endian // Initialize list using 4 consequent values pattern. mtf[0] = pattern; do { pattern += 0x0404'0404; // Advance all 4 values by 4. mtf[i] = pattern; i++; } while (i <= upper_bound); // Transform the input. 
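// (Editor's note: e.g. with a freshly initialized list {0, 1, 2, ...}, the input
// 2, 0, 1 decodes to 2, 2, 0 -- each input value indexes the current list and the
// selected element is moved to the front.)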
upper_bound = 0; for (i = 0; i < v_len; ++i) { int index = v[i]; uint8_t value = mtf_u8[index]; upper_bound |= v[i]; v[i] = value; mtf_u8[-1] = value; do { index--; mtf_u8[index + 1] = mtf_u8[index]; } while (index >= 0); } // Remember amount of elements to be reinitialized. s->mtf_upper_bound = upper_bound >> 2; } static __device__ uint32_t DecodeContextMap(debrotli_state_s* s, uint8_t* context_map, uint32_t context_map_size, uint16_t* context_map_vlc) { uint32_t num_htrees = getbits_u8vlc(s) + 1; uint32_t bits, context_index, max_run_length_prefix, alphabet_size; if (num_htrees <= 1) { memset(context_map, 0, context_map_size); return num_htrees; } bits = showbits(s, 5); if ((bits & 1) != 0) { // Use RLE for zeros. max_run_length_prefix = (bits >> 1) + 1; skipbits(s, 5); } else { max_run_length_prefix = 0; skipbits(s, 1); } alphabet_size = num_htrees + max_run_length_prefix; DecodeHuffmanTree(s, alphabet_size, alphabet_size, context_map_vlc); if (s->error) { return num_htrees; } context_index = 0; while (context_index < context_map_size) { uint32_t code = getvlc(s, context_map_vlc); if (code == 0) { context_map[context_index++] = 0; } else if (code > max_run_length_prefix) { context_map[context_index++] = (uint8_t)(code - max_run_length_prefix); } else { // RLE sub-stage. uint32_t reps = getbits(s, code) + (1u << code); if (context_index + reps > context_map_size) { s->error = -1; break; } do { context_map[context_index++] = 0; } while (--reps); } } bits = getbits(s, 1); if (bits != 0) { InverseMoveToFrontTransform(s, context_map, context_map_size); } return num_htrees; } static __device__ void DetectTrivialLiteralBlockTypes(debrotli_state_s* s) { uint32_t i; for (i = 0; i < s->num_block_types[0]; i++) { uint32_t offset = i << 6; uint32_t error = 0; uint32_t sample = s->context_map[offset]; uint32_t j; for (j = 0; j < (1u << 6); ++j) { error |= s->context_map[offset + j] ^ sample; } if (error == 0) { s->context_modes[i] |= 4u; } } } /** 2 bits: NPOSTFIX, parameter used in the distance coding 4 bits: four most significant bits of NDIRECT, to get the actual value of the parameter NDIRECT, left-shift this four-bit number by NPOSTFIX bits NBLTYPESL * 2 bits: context mode for each literal block type 1..11 bits: NTREESL, number of literal prefix trees, encoded with the same variable-length code as NBLTYPESL Literal context map, encoded as described in Section 7.3, appears only if NTREESL >= 2; otherwise, the context map has only zero values 1..11 bits: NTREESD, number of distance prefix trees, encoded with the same variable-length code as NBLTYPESD Distance context map, encoded as described in Section 7.3, appears only if NTREESD >= 2; otherwise, the context map has only zero values */ static __device__ debrotli_huff_tree_group_s* HuffmanTreeGroupInit(debrotli_state_s* s, uint32_t alphabet_size, uint32_t max_symbol, uint32_t ntrees) { auto* group = reinterpret_cast<debrotli_huff_tree_group_s*>(local_alloc( s, sizeof(debrotli_huff_tree_group_s) + ntrees * sizeof(uint16_t*) - sizeof(uint16_t*))); group->alphabet_size = (uint16_t)alphabet_size; group->max_symbol = (uint16_t)max_symbol; group->num_htrees = (uint16_t)ntrees; group->htrees[0] = nullptr; return group; } static __device__ void HuffmanTreeGroupAlloc(debrotli_state_s* s, debrotli_huff_tree_group_s* group) { if (!group->htrees[0]) { uint32_t alphabet_size = group->alphabet_size; uint32_t ntrees = group->num_htrees; uint32_t max_table_size = kMaxHuffmanTableSize[(alphabet_size + 31) >> 5]; uint32_t code_size = sizeof(uint16_t) * ntrees * 
max_table_size; group->htrees[0] = reinterpret_cast<uint16_t*>(local_alloc(s, code_size)); if (!group->htrees[0]) { if (s->fb_base) { group->htrees[0] = reinterpret_cast<uint16_t*>(s->fb_base + s->fb_size); } s->fb_size += (code_size + 3) & ~3; } } } // Decodes a series of Huffman table using ReadHuffmanCode function. static __device__ void HuffmanTreeGroupDecode(debrotli_state_s* s, debrotli_huff_tree_group_s* group) { uint16_t* next = group->htrees[0]; for (int htree_index = 0; htree_index < group->num_htrees; htree_index++) { uint32_t table_size = DecodeHuffmanTree(s, group->alphabet_size, group->max_symbol, next); if (s->error) break; group->htrees[htree_index] = next; next += table_size; } } static __device__ void DecodeHuffmanTreeGroups(debrotli_state_s* s, uint8_t* fb_heap_base, uint32_t fb_heap_size) { uint32_t bits, npostfix, ndirect, nbltypesl; uint32_t context_map_size; uint16_t* context_map_vlc; uint32_t num_direct_codes, num_distance_codes, num_literal_htrees, num_dist_htrees; // Decode context maps bits = getbits(s, 6); npostfix = bits & 3; ndirect = (bits >> 2) << npostfix; s->distance_postfix_bits = (uint8_t)npostfix; s->num_direct_distance_codes = brotli_num_distance_short_codes + ndirect; s->distance_postfix_mask = (1 << npostfix) - 1; nbltypesl = s->num_block_types[0]; s->context_modes = local_alloc(s, nbltypesl); for (uint32_t i = 0; i < nbltypesl; i++) { s->context_modes[i] = getbits(s, 2); } context_map_vlc = reinterpret_cast<uint16_t*>( local_heap_shrink(s, brotli_huffman_max_size_272 * sizeof(uint16_t))); context_map_size = nbltypesl << 6; s->context_map = local_alloc(s, context_map_size); num_literal_htrees = DecodeContextMap(s, s->context_map, context_map_size, context_map_vlc); if (s->error) return; DetectTrivialLiteralBlockTypes(s); context_map_size = s->num_block_types[2] << 2; s->dist_context_map = local_alloc(s, context_map_size); num_dist_htrees = DecodeContextMap(s, s->dist_context_map, context_map_size, context_map_vlc); if (s->error) return; local_heap_grow(s, brotli_huffman_max_size_272 * sizeof(uint16_t)); // free context map vlc num_direct_codes = s->num_direct_distance_codes - brotli_num_distance_short_codes; num_distance_codes = brotli_distance_alphabet_size(s->distance_postfix_bits, num_direct_codes, 24u); s->literal_hgroup = HuffmanTreeGroupInit( s, brotli_num_literal_symbols, brotli_num_literal_symbols, num_literal_htrees); s->insert_copy_hgroup = HuffmanTreeGroupInit( s, brotli_num_command_symbols, brotli_num_command_symbols, s->num_block_types[1]); s->distance_hgroup = HuffmanTreeGroupInit(s, num_distance_codes, num_distance_codes, num_dist_htrees); // Attempt to allocate local memory first, before going to fb s->fb_size = 0; HuffmanTreeGroupAlloc(s, s->literal_hgroup); HuffmanTreeGroupAlloc(s, s->insert_copy_hgroup); HuffmanTreeGroupAlloc(s, s->distance_hgroup); if (s->fb_size != 0) { // Did not fit in local memory -> allocate fb s->fb_base = ext_heap_alloc(s->fb_size, fb_heap_base, fb_heap_size); if (!s->fb_base) { s->error = -2; s->fb_size = 0; return; } // Repeat allocation falling back to fb s->fb_size = 0; HuffmanTreeGroupAlloc(s, s->literal_hgroup); HuffmanTreeGroupAlloc(s, s->insert_copy_hgroup); HuffmanTreeGroupAlloc(s, s->distance_hgroup); } HuffmanTreeGroupDecode(s, s->literal_hgroup); if (s->error) return; HuffmanTreeGroupDecode(s, s->insert_copy_hgroup); if (s->error) return; HuffmanTreeGroupDecode(s, s->distance_hgroup); } static __device__ int PrepareLiteralDecoding(debrotli_state_s* s, uint8_t const*& context_map_slice) { int 
context_mode; uint32_t block_type = s->block_type_rb[1]; uint32_t context_offset = block_type << 6; context_map_slice = s->context_map + context_offset; context_mode = s->context_modes[block_type]; return brotli_context_lut(context_mode); } /// Decodes a command or literal and updates block type ring-buffer. Reads 3..54 bits. static __device__ uint32_t DecodeBlockTypeAndLength(debrotli_state_s* s, int tree_type) { uint32_t max_block_type = s->num_block_types[tree_type]; if (max_block_type > 1) { uint16_t const* len_tree = s->block_type_vlc[tree_type]; uint16_t const* type_tree = len_tree + brotli_huffman_max_size_26; uint8_t* ringbuffer = &s->block_type_rb[tree_type * 2]; // Read 0..15 + 3..39 bits. uint32_t block_type = getvlc(s, type_tree); uint32_t block_len = getvlc(s, len_tree); block_len = kBlockLengthPrefixCodeOffset[block_len] + getbits(s, kBlockLengthPrefixCodeBits[block_len]); if (block_type == 1) { block_type = ringbuffer[1] + 1; } else if (block_type == 0) { block_type = ringbuffer[0]; } else { block_type -= 2; } if (block_type >= max_block_type) { block_type -= max_block_type; } ringbuffer[0] = ringbuffer[1]; ringbuffer[1] = (uint8_t)block_type; return block_len; } else { return 0; // Can only get here because of bitstream error } } inline __device__ int ToUpperCase(uint8_t* p) { if (p[0] < 0xC0) { if (p[0] >= 'a' && p[0] <= 'z') { p[0] ^= 32; } return 1; } // An overly simplified uppercasing model for UTF-8. if (p[0] < 0xE0) { p[1] ^= 32; return 2; } // An arbitrary transform for three byte characters. p[2] ^= 5; return 3; } static __device__ int TransformDictionaryWord(uint8_t* dst, uint8_t const* word, int len, int transform_idx) { int idx = 0; uint8_t const* prefix = brotli_transform_prefix(transform_idx); uint8_t type = brotli_transform_type(transform_idx); uint8_t const* suffix = brotli_transform_suffix(transform_idx); { int prefix_len = *prefix++; while (prefix_len--) { dst[idx++] = *prefix++; } } { int const t = type; int i = 0; if (t <= BROTLI_TRANSFORM_OMIT_LAST_9) { len -= t; } else if (t >= BROTLI_TRANSFORM_OMIT_FIRST_1 && t <= BROTLI_TRANSFORM_OMIT_FIRST_9) { int skip = t - (BROTLI_TRANSFORM_OMIT_FIRST_1 - 1); word += skip; len -= skip; } while (i < len) { dst[idx++] = word[i++]; } if (t == BROTLI_TRANSFORM_UPPERCASE_FIRST) { ToUpperCase(&dst[idx - len]); } else if (t == BROTLI_TRANSFORM_UPPERCASE_ALL) { uint8_t* uppercase = &dst[idx - len]; while (len > 0) { int step = ToUpperCase(uppercase); uppercase += step; len -= step; } } } { int suffix_len = *suffix++; while (suffix_len--) { dst[idx++] = *suffix++; } return idx; } } /// ProcessCommands, actual decoding: 1 warp, most work done by thread0 static __device__ void ProcessCommands(debrotli_state_s* s, brotli_dictionary_s const* words, int t) { int32_t meta_block_len = s->meta_block_len; uint8_t* out = s->out; int32_t pos = 0; int p1 = s->p1; int p2 = s->p2; uint16_t const* htree_command; uint8_t const *context_map_slice, *dist_context_map_slice; int dist_rb_idx; uint32_t blen_L, blen_I, blen_D; auto* const dict_scratch = reinterpret_cast<uint8_t*>( &s->hs); // 24+13 bytes (max length of a dictionary word including prefix & suffix) int context_mode; if (!t) { context_mode = PrepareLiteralDecoding(s, context_map_slice); dist_context_map_slice = s->dist_context_map; htree_command = s->insert_copy_hgroup->htrees[0]; dist_rb_idx = s->dist_rb_idx; blen_L = s->block_length[0]; blen_I = s->block_length[1]; blen_D = s->block_length[2]; } while (pos < meta_block_len) { uint32_t copy_length; int32_t distance_code; if (!t) 
{ if (blen_I == 0) { blen_I = DecodeBlockTypeAndLength(s, 1); htree_command = s->insert_copy_hgroup->htrees[s->block_type_rb[3]]; if (s->cur >= s->end) { s->error = 1; pos = meta_block_len; } } // Read the insert/copy length in the command. { uint32_t cmd_code = getvlc(s, htree_command); CmdLutElement v = kCmdLut[cmd_code]; uint8_t distance_context = v.context; uint32_t insert_length = v.insert_len_offset; int32_t max_distance; distance_code = v.distance_code; if (v.insert_len_extra_bits) { insert_length += getbits(s, v.insert_len_extra_bits); } copy_length = v.copy_len_offset; if (v.copy_len_extra_bits) { copy_length += getbits(s, v.copy_len_extra_bits); } --blen_I; if (insert_length != 0) { if (pos + insert_length > meta_block_len) { s->error = -2; pos = meta_block_len; } // Read the literals in the command. else do { int len; if (blen_L == 0) { blen_L = DecodeBlockTypeAndLength(s, 0); context_mode = PrepareLiteralDecoding(s, context_map_slice); } len = min(blen_L, insert_length); insert_length -= len; blen_L -= len; if (brotli_need_context_lut(context_mode)) { debrotli_huff_tree_group_s const* literal_hgroup = s->literal_hgroup; do { int context = brotli_context(p1, p2, context_mode); p2 = p1; p1 = getvlc(s, literal_hgroup->htrees[context_map_slice[context]]); out[pos++] = p1; } while (--len); } else { uint16_t const* literal_htree = s->literal_hgroup->htrees[context_map_slice[0]]; do { p2 = p1; p1 = getvlc(s, literal_htree); out[pos++] = p1; } while (--len); } } while (insert_length); if (pos == meta_block_len) { copy_length = 0; } } // Non-literal symbol if (pos < meta_block_len) { if (distance_code >= 0) { // Implicit distance case. --dist_rb_idx; distance_code = s->dist_rb[dist_rb_idx & 3]; distance_context = 1; } else { uint16_t const* distance_tree; int distval; // Read distance code in the command, unless it was implicitly zero. if (blen_D == 0) { blen_D = DecodeBlockTypeAndLength(s, 2); dist_context_map_slice = s->dist_context_map + (s->block_type_rb[5] << 2); } distance_tree = s->distance_hgroup->htrees[dist_context_map_slice[distance_context]]; distance_code = getvlc(s, distance_tree); // Convert the distance code to the actual distance by possibly looking up past // distances from the s->ringbuffer. distance_context = 0; if ((distance_code & ~0xF) == 0) { // Take distance from ring buffer if (distance_code == 0) { --dist_rb_idx; distance_code = s->dist_rb[dist_rb_idx & 3]; // Compensate double distance-ring-buffer roll for dictionary items. distance_context = 1; } else { int dist = distance_code << 1; // kDistanceShortCodeIndexOffset has 2-bit values from LSB: 3, 2, 1, 0, 3, 3, 3, 3, // 3, 3, 2, 2, 2, 2, 2, 2 uint32_t const kDistanceShortCodeIndexOffset = 0xAAAF'FF1B; // kDistanceShortCodeValueOffset has 2-bit values from LSB: -0, 0,-0, 0,-1, 1,-2, // 2,-3, 3,-1, 1,-2, 2,-3, 3 uint32_t const kDistanceShortCodeValueOffset = 0xFA5F'A500; int v = (dist_rb_idx + (int)(kDistanceShortCodeIndexOffset >> dist)) & 0x3; distance_code = s->dist_rb[v]; v = (int)(kDistanceShortCodeValueOffset >> dist) & 0x3; if ((dist & 0x3) != 0) { distance_code += v; } else { distance_code -= v; if (distance_code <= 0) { // A huge distance will cause a failure later on. This is a little faster than // failing here. 
distance_code = 0x7FFF'FFFF; } } } } else { distval = distance_code - (int)s->num_direct_distance_codes; if (distval >= 0) { uint32_t nbits; int postfix; int offset; if (s->distance_postfix_bits == 0) { nbits = ((uint32_t)distval >> 1) + 1; offset = ((2 + (distval & 1)) << nbits) - 4; distance_code = (int)s->num_direct_distance_codes + offset + (int)getbits(s, nbits); } else { // This branch also works well when s->distance_postfix_bits == 0. uint32_t bits; postfix = distval & s->distance_postfix_mask; distval >>= s->distance_postfix_bits; nbits = ((uint32_t)distval >> 1) + 1; bits = getbits(s, nbits); offset = ((2 + (distval & 1)) << nbits) - 4; distance_code = (int)s->num_direct_distance_codes + ((offset + (int)bits) << s->distance_postfix_bits) + postfix; } } distance_code = distance_code - brotli_num_distance_short_codes + 1; } --blen_D; } max_distance = s->max_backward_distance; if (max_distance > (out + pos - s->outbase)) { max_distance = (int32_t)(out + pos - s->outbase); } // Apply copy of LZ77 back-reference, or static dictionary reference if the distance is // larger than the max LZ77 distance if (distance_code > max_distance) { // The maximum allowed distance is brotli_max_allowed_distance = 0x7FFF'FFFC. // With this choice, no signed overflow can occur after decoding // a special distance code (e.g., after adding 3 to the last distance). if (distance_code > brotli_max_allowed_distance || copy_length < brotli_min_dictionary_word_length || copy_length > brotli_max_dictionary_word_length) { // printf("distance_code = %d/%d, copy_length = %d\n", distance_code, (int)(out - // s->outbase), copy_length); s->error = -1; pos = meta_block_len; copy_length = 0; } else { auto offset = (int32_t)words->offsets_by_length[copy_length]; uint32_t shift = words->size_bits_by_length[copy_length]; uint32_t address = distance_code - max_distance - 1; int32_t word_idx = address & ((1 << shift) - 1); uint32_t transform_idx = address >> shift; // Compensate double distance-ring-buffer roll. dist_rb_idx += distance_context; offset += word_idx * copy_length; if (transform_idx == 0) { distance_code = -offset; } else if (transform_idx < kNumTransforms) { copy_length = TransformDictionaryWord( dict_scratch, &words->data[offset], copy_length, transform_idx); distance_code = 0; if (copy_length == 1) { // Special case for single byte output p2 = p1; p1 = dict_scratch[0]; out[pos++] = p1; copy_length = 0; } } else { // printf("transform_idx=%d/%d, distance_code = %d/%d, copy_length = %d\n", // transform_idx, kNumTransforms, distance_code, (int)(out - s->outbase), // copy_length); s->error = -1; pos = meta_block_len; copy_length = 0; } if (pos + copy_length > meta_block_len) { s->error = -1; pos = meta_block_len; copy_length = 0; } } } else { // Update the recent distances cache. s->dist_rb[dist_rb_idx & 3] = distance_code; ++dist_rb_idx; if (pos + copy_length > meta_block_len) { s->error = -1; pos = meta_block_len; copy_length = 0; } } } } } pos = shuffle(pos); copy_length = shuffle(copy_length); if (copy_length > 0) { uint8_t b; distance_code = shuffle(distance_code); if (distance_code > 0) { // Copy for (uint32_t i = t; i < copy_length; i += 32) { uint8_t const* src = out + pos + ((i >= (uint32_t)distance_code) ? (i % (uint32_t)distance_code) : i) - distance_code; b = *src; out[pos + i] = b; } } else { // Dictionary uint8_t const* src = (distance_code < 0) ? 
&words->data[-distance_code] : dict_scratch; if (t < copy_length) { b = src[t]; out[pos + t] = b; if (32 + t < copy_length) { b = src[32 + t]; out[pos + 32 + t] = b; } } } p1 = shuffle((uint32_t)b, (copy_length - 1) & 0x1f); p2 = shuffle((uint32_t)b, (copy_length - 2) & 0x1f); pos += copy_length; } } // Ensure all other threads have observed prior state of p1 & p2 before overwriting __syncwarp(); if (!t) { s->p1 = (uint8_t)p1; s->p2 = (uint8_t)p2; s->dist_rb_idx = dist_rb_idx; } } /** * @brief Brotli decoding kernel * See https://tools.ietf.org/html/rfc7932 * * blockDim = {block_size,1,1} * * @param[in] inputs Source buffer per block * @param[out] outputs Destination buffer per block * @param[out] results Decompressor status per block * @param scratch Intermediate device memory heap space (will be dynamically shared between blocks) * @param scratch_size Size of scratch heap space (smaller sizes may result in serialization between * blocks) */ __global__ void __launch_bounds__(block_size, 2) gpu_debrotli_kernel(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, uint8_t* scratch, uint32_t scratch_size) { __shared__ __align__(16) debrotli_state_s state_g; int t = threadIdx.x; auto const block_id = blockIdx.x; debrotli_state_s* const s = &state_g; if (block_id >= inputs.size()) { return; } // Thread0: initializes shared state and decode stream header if (!t) { auto const src = inputs[block_id].data(); auto const src_size = inputs[block_id].size(); if (src && src_size >= 8) { s->error = 0; s->out = outputs[block_id].data(); s->outbase = s->out; s->bytes_left = outputs[block_id].size(); s->mtf_upper_bound = 63; s->dist_rb[0] = 16; s->dist_rb[1] = 15; s->dist_rb[2] = 11; s->dist_rb[3] = 4; s->dist_rb_idx = 0; s->p1 = s->p2 = 0; initbits(s, src, src_size); DecodeStreamHeader(s); } else { s->error = 1; s->out = s->outbase = nullptr; } } __syncthreads(); if (!s->error) { // Main loop: decode meta-blocks do { __syncthreads(); if (!t) { // Thread0: Decode meta-block header DecodeMetaBlockHeader(s); if (!s->error && s->meta_block_len > s->bytes_left) { s->error = 2; } } __syncthreads(); if (!s->error && s->meta_block_len != 0) { if (s->is_uncompressed) { // Uncompressed block uint8_t const* src = s->cur + ((s->bitpos + 7) >> 3); uint8_t* dst = s->out; if (!t) { if (getbits_bytealign(s) != 0) { s->error = -1; } else if (src + s->meta_block_len > s->end) { s->error = 1; } else { initbits(s, s->base, s->end - s->base, src - s->base); } } __syncthreads(); if (!s->error) { // Simple block-wide memcpy for (int32_t i = t; i < s->meta_block_len; i += block_size) { dst[i] = src[i]; } } } else { // Compressed block if (!t) { // Thread0: Reset local heap, decode huffman tables s->heap_used = 0; s->heap_limit = (uint16_t)(sizeof(s->heap) / sizeof(s->heap[0])); s->fb_base = nullptr; s->fb_size = 0; DecodeHuffmanTables(s); if (!s->error) { DecodeHuffmanTreeGroups(s, scratch, scratch_size); } } __syncthreads(); if (!s->error) { // Warp0: Decode compressed block, warps 1..7 are all idle (!) 
if (t < 32) ProcessCommands(s, reinterpret_cast<brotli_dictionary_s*>(scratch + scratch_size), t); __syncthreads(); } // Free any allocated memory if (s->fb_base) { if (!t) { ext_heap_free(s->fb_base, s->fb_size, scratch, scratch_size); } __syncthreads(); } } // Update output byte count and position if (!t) { s->bytes_left -= s->meta_block_len; s->out += s->meta_block_len; } } __syncthreads(); } while (!s->error && !s->is_last && s->bytes_left != 0); } __syncthreads(); // Output decompression status if (!t) { results[block_id].bytes_written = s->out - s->outbase; results[block_id].status = (s->error == 0) ? compression_status::SUCCESS : compression_status::FAILURE; // Return ext heap used by last block (statistics) results[block_id].reserved = s->fb_size; } } /** * @brief Computes the size of temporary memory for Brotli decompression * * In most cases, a brotli metablock will require on the order of ~10KB * to ~40KB of scratch space for various lookup tables (mainly context maps * and Huffman lookup tables), as well as temporary scratch space to decode * the header. However, because the syntax allows for a huge number of unique * tables, the theoretical worst case is quite large at ~1.3MB per threadblock, * which would scale with GPU occupancy. * * This is solved by a custom memory allocator that first allocates from a local * heap in shared mem (with the end of the heap being used as a stack for * intermediate small allocations). Once this is exhausted, the 'external' * heap is used, allocating from a single scratch surface shared between all * the threadblocks, such that allocation can't fail, but may cause serialization * between threadblocks should more than one threadblock ever allocate the worst * case size. * * @param[in] max_num_inputs The maximum number of compressed input chunks * * @return The size in bytes of required temporary memory */ size_t __host__ get_gpu_debrotli_scratch_size(int max_num_inputs) { int sm_count = 0; int dev = 0; uint32_t max_fb_size, min_fb_size, fb_size; CUDF_CUDA_TRY(cudaGetDevice(&dev)); if (cudaSuccess == cudaDeviceGetAttribute(&sm_count, cudaDevAttrMultiProcessorCount, dev)) { // printf("%d SMs on device %d\n", sm_count, dev); max_num_inputs = min(max_num_inputs, sm_count * 3); // no more than 3 blocks/sm due to 32KB smem use if (max_num_inputs <= 0) { max_num_inputs = sm_count * 2; // Target 2 blocks/SM by default for scratch mem computation } } max_num_inputs = min(max(max_num_inputs, 1), 512); // Max fb size per block occurs if all huffman tables for all 3 group types fail local_alloc() // with num_htrees=256 (See HuffmanTreeGroupAlloc) max_fb_size = 256 * (630 + 1080 + 920) * 2; // 1.3MB // Min avg fb size needed per block (typical) min_fb_size = 10 * 1024; // TODO: Gather some statistics for typical meta-block size // Allocate at least two worst-case metablocks or 1 metablock plus typical size for every other // block fb_size = max(max_fb_size * min(max_num_inputs, 2), max_fb_size + max_num_inputs * min_fb_size); // Add some room for alignment return fb_size + 16 + sizeof(brotli_dictionary_s); } #define DUMP_FB_HEAP 0 #if DUMP_FB_HEAP #include <stdio.h> #endif void gpu_debrotli(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, void* scratch, size_t scratch_size, rmm::cuda_stream_view stream) { auto const count = inputs.size(); uint32_t fb_heap_size; auto* scratch_u8 = static_cast<uint8_t*>(scratch); dim3 dim_block(block_size, 1); dim3
dim_grid(count, 1); // TODO: Check max grid dimensions vs max expected count CUDF_EXPECTS(scratch_size >= sizeof(brotli_dictionary_s), "Insufficient scratch space for debrotli"); scratch_size = min(scratch_size, static_cast<size_t>(0xffff'ffffu)); fb_heap_size = (uint32_t)((scratch_size - sizeof(brotli_dictionary_s)) & ~0xf); CUDF_CUDA_TRY(cudaMemsetAsync(scratch_u8, 0, 2 * sizeof(uint32_t), stream.value())); // NOTE: The 128KB dictionary copy can have a relatively large overhead since source isn't // page-locked CUDF_CUDA_TRY(cudaMemcpyAsync(scratch_u8 + fb_heap_size, get_brotli_dictionary(), sizeof(brotli_dictionary_s), cudaMemcpyDefault, stream.value())); gpu_debrotli_kernel<<<dim_grid, dim_block, 0, stream.value()>>>( inputs, outputs, results, scratch_u8, fb_heap_size); #if DUMP_FB_HEAP uint32_t dump[2]; uint32_t cur = 0; printf("heap dump (%d bytes)\n", fb_heap_size); while (cur < fb_heap_size && !(cur & 3)) { CUDF_CUDA_TRY(cudaMemcpyAsync( &dump[0], scratch_u8 + cur, 2 * sizeof(uint32_t), cudaMemcpyDefault, stream.value())); stream.synchronize(); printf("@%d: next = %d, size = %d\n", cur, dump[0], dump[1]); cur = (dump[0] > cur) ? dump[0] : 0xffff'ffffu; } #endif } } // namespace io } // namespace cudf
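// ----------------------------------------------------------------------------
// A minimal host-side sketch of driving gpu_debrotli() for a single compressed
// chunk, assuming RMM-managed device buffers. The wrapper name and the
// d_comp/d_out/comp_size/out_size parameters are hypothetical; only
// get_gpu_debrotli_scratch_size(), gpu_debrotli() and compression_result come
// from the code above. The kernel consumes device-resident arrays of span
// descriptors, so the spans themselves are staged through device_uvector.
#include <cudf/utilities/span.hpp>
#include <rmm/device_buffer.hpp>
#include <rmm/device_uvector.hpp>

namespace {
[[maybe_unused]] void decompress_one_brotli_chunk(uint8_t const* d_comp,
                                                  size_t comp_size,
                                                  uint8_t* d_out,
                                                  size_t out_size,
                                                  rmm::cuda_stream_view stream)
{
  // Single scratch heap shared by all thread blocks, sized here for one input
  auto const scratch_size = cudf::io::get_gpu_debrotli_scratch_size(1);
  rmm::device_buffer scratch(scratch_size, stream);
  // Device-side arrays holding one input span, one output span, one result
  rmm::device_uvector<cudf::device_span<uint8_t const>> d_inputs(1, stream);
  rmm::device_uvector<cudf::device_span<uint8_t>> d_outputs(1, stream);
  rmm::device_uvector<cudf::io::compression_result> d_results(1, stream);
  auto const in_span  = cudf::device_span<uint8_t const>{d_comp, comp_size};
  auto const out_span = cudf::device_span<uint8_t>{d_out, out_size};
  d_inputs.set_element(0, in_span, stream);  // synchronous H->D copy of the descriptor
  d_outputs.set_element(0, out_span, stream);
  cudf::io::gpu_debrotli(d_inputs, d_outputs, d_results, scratch.data(), scratch_size, stream);
  // d_results[0] (copied back to host) then holds the status and bytes_written
}
}  // namespace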
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/unsnap.cu
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "gpuinflate.hpp" #include <io/utilities/block_utils.cuh> #include <rmm/cuda_stream_view.hpp> #include <cub/cub.cuh> namespace cudf { namespace io { constexpr int32_t batch_size = (1 << 5); constexpr int32_t batch_count = (1 << 2); constexpr int32_t prefetch_size = (1 << 9); // 512B, in 32B chunks constexpr bool log_cyclecount = false; void __device__ busy_wait(size_t cycles) { clock_t start = clock(); for (;;) { clock_t const now = clock(); clock_t const elapsed = now > start ? now - start : now + (0xffff'ffff - start); if (elapsed >= cycles) return; } } /** * @brief Describes a single LZ77 symbol (single entry in batch) */ struct unsnap_batch_s { int32_t len; // 1..64 = Number of bytes uint32_t offset; // copy distance if greater than zero or negative of literal offset in byte stream }; /** * @brief Queue structure used to exchange data between warps */ struct unsnap_queue_s { unsnap_queue_s() = default; // required to compile on ctk-12.2 + aarch64 uint32_t prefetch_wrpos; ///< Prefetcher write position uint32_t prefetch_rdpos; ///< Prefetch consumer read position int32_t prefetch_end; ///< Prefetch enable flag (nonzero stops prefetcher) int32_t batch_len[batch_count]; ///< Length of each batch - <0:end, 0:not ready, >0:symbol count unsnap_batch_s batch[batch_count * batch_size]; ///< LZ77 batch data uint8_t buf[prefetch_size]; ///< Prefetch buffer }; /** * @brief snappy decompression state */ struct unsnap_state_s { constexpr unsnap_state_s() noexcept {} // required to compile on ctk-12.2 + aarch64 uint8_t const* base{}; ///< base ptr of compressed stream uint8_t const* end{}; ///< end of compressed stream uint32_t uncompressed_size{}; ///< uncompressed stream size uint32_t bytes_left{}; ///< remaining bytes to decompress int32_t error{}; ///< current error status uint32_t tstart{}; ///< start time for perf logging volatile unsnap_queue_s q{}; ///< queue for cross-warp communication device_span<uint8_t const> src; ///< input for current block device_span<uint8_t> dst; ///< output for current block }; inline __device__ volatile uint8_t& byte_access(unsnap_state_s* s, uint32_t pos) { return s->q.buf[pos & (prefetch_size - 1)]; } /** * @brief prefetches data for the symbol decoding stage * * @param s decompression state * @param t warp lane id */ __device__ void snappy_prefetch_bytestream(unsnap_state_s* s, int t) { uint8_t const* base = s->base; auto end = (uint32_t)(s->end - base); auto align_bytes = (uint32_t)(0x20 - (0x1f & reinterpret_cast<uintptr_t>(base))); int32_t pos = min(align_bytes, end); int32_t blen; // Start by prefetching up to the next 32B-aligned location if (t < pos) { s->q.buf[t] = base[t]; } blen = 0; do { __syncwarp(); if (!t) { uint32_t minrdpos; s->q.prefetch_wrpos = pos; minrdpos = pos - min(pos, prefetch_size - 32u); blen = (int)min(32u, end - pos); for (;;) { uint32_t rdpos = s->q.prefetch_rdpos; if (rdpos >= minrdpos) break; if (s->q.prefetch_end) { blen = 0; break; }
busy_wait(20); } } blen = shuffle(blen); if (t < blen) { byte_access(s, pos + t) = base[pos + t]; } pos += blen; } while (blen > 0); } /** * @brief Lookup table for get_len3_mask() * * Indexed by a 10-bit pattern, contains the corresponding 4-bit mask of * 3-byte code lengths in the lower 4 bits, along with the total number of * bytes used for coding the four lengths in the upper 4 bits. * The upper 4-bit value could also be obtained by 8+__popc(mask4) * * for (uint32_t k = 0; k < 1024; k++) * { * for (uint32_t i = 0, v = 0, b = k, n = 0; i < 4; i++) * { * v |= (b & 1) << i; * n += (b & 1) + 2; * b >>= (b & 1) + 2; * } * k_len3lut[k] = v | (n << 4); * } */ static const uint8_t __device__ __constant__ k_len3lut[1 << 10] = { 0x80, 0x91, 0x80, 0x91, 0x92, 0x91, 0x92, 0x91, 0x80, 0xa3, 0x80, 0xa3, 0x92, 0xa3, 0x92, 0xa3, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xa3, 0x94, 0xa3, 0x92, 0xa3, 0x92, 0xa3, 0x80, 0xa5, 0x80, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x80, 0xa3, 0x80, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x94, 0xa5, 0x94, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x94, 0xa3, 0x94, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x98, 0x91, 0x98, 0x91, 0x92, 0x91, 0x92, 0x91, 0x98, 0xb7, 0x98, 0xb7, 0x92, 0xb7, 0x92, 0xb7, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xb7, 0x94, 0xb7, 0x92, 0xb7, 0x92, 0xb7, 0x98, 0xa5, 0x98, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x98, 0xb7, 0x98, 0xb7, 0xa6, 0xb7, 0xa6, 0xb7, 0x94, 0xa5, 0x94, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x94, 0xb7, 0x94, 0xb7, 0xa6, 0xb7, 0xa6, 0xb7, 0x80, 0xa9, 0x80, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x80, 0xa3, 0x80, 0xa3, 0xaa, 0xa3, 0xaa, 0xa3, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xa3, 0xac, 0xa3, 0xaa, 0xa3, 0xaa, 0xa3, 0x80, 0xa5, 0x80, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x80, 0xa3, 0x80, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0xac, 0xa5, 0xac, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0xac, 0xa3, 0xac, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x98, 0xa9, 0x98, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x98, 0xb7, 0x98, 0xb7, 0xaa, 0xb7, 0xaa, 0xb7, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xb7, 0xac, 0xb7, 0xaa, 0xb7, 0xaa, 0xb7, 0x98, 0xa5, 0x98, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x98, 0xb7, 0x98, 0xb7, 0xa6, 0xb7, 0xa6, 0xb7, 0xac, 0xa5, 0xac, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0xac, 0xb7, 0xac, 0xb7, 0xa6, 0xb7, 0xa6, 0xb7, 0x80, 0x91, 0x80, 0x91, 0x92, 0x91, 0x92, 0x91, 0x80, 0xbb, 0x80, 0xbb, 0x92, 0xbb, 0x92, 0xbb, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xbb, 0x94, 0xbb, 0x92, 0xbb, 0x92, 0xbb, 0x80, 0xbd, 0x80, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x80, 0xbb, 0x80, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x94, 0xbd, 0x94, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x94, 0xbb, 0x94, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x98, 0x91, 0x98, 0x91, 0x92, 0x91, 0x92, 0x91, 0x98, 0xb7, 0x98, 0xb7, 0x92, 0xb7, 0x92, 0xb7, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xb7, 0x94, 0xb7, 0x92, 0xb7, 0x92, 0xb7, 0x98, 0xbd, 0x98, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x98, 0xb7, 0x98, 0xb7, 0xbe, 0xb7, 0xbe, 0xb7, 0x94, 0xbd, 0x94, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x94, 0xb7, 0x94, 0xb7, 0xbe, 0xb7, 0xbe, 0xb7, 0x80, 0xa9, 0x80, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x80, 0xbb, 0x80, 0xbb, 0xaa, 0xbb, 0xaa, 0xbb, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xbb, 0xac, 0xbb, 0xaa, 0xbb, 0xaa, 0xbb, 0x80, 0xbd, 0x80, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x80, 0xbb, 0x80, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0xac, 0xbd, 0xac, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0xac, 0xbb, 0xac, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x98, 0xa9, 0x98, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x98, 0xb7, 0x98, 0xb7, 0xaa, 0xb7, 0xaa, 0xb7, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 
0xaa, 0xa9, 0xac, 0xb7, 0xac, 0xb7, 0xaa, 0xb7, 0xaa, 0xb7, 0x98, 0xbd, 0x98, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x98, 0xb7, 0x98, 0xb7, 0xbe, 0xb7, 0xbe, 0xb7, 0xac, 0xbd, 0xac, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0xac, 0xb7, 0xac, 0xb7, 0xbe, 0xb7, 0xbe, 0xb7, 0x80, 0x91, 0x80, 0x91, 0x92, 0x91, 0x92, 0x91, 0x80, 0xa3, 0x80, 0xa3, 0x92, 0xa3, 0x92, 0xa3, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xa3, 0x94, 0xa3, 0x92, 0xa3, 0x92, 0xa3, 0x80, 0xa5, 0x80, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x80, 0xa3, 0x80, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x94, 0xa5, 0x94, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x94, 0xa3, 0x94, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x98, 0x91, 0x98, 0x91, 0x92, 0x91, 0x92, 0x91, 0x98, 0xcf, 0x98, 0xcf, 0x92, 0xcf, 0x92, 0xcf, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xcf, 0x94, 0xcf, 0x92, 0xcf, 0x92, 0xcf, 0x98, 0xa5, 0x98, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x98, 0xcf, 0x98, 0xcf, 0xa6, 0xcf, 0xa6, 0xcf, 0x94, 0xa5, 0x94, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x94, 0xcf, 0x94, 0xcf, 0xa6, 0xcf, 0xa6, 0xcf, 0x80, 0xa9, 0x80, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x80, 0xa3, 0x80, 0xa3, 0xaa, 0xa3, 0xaa, 0xa3, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xa3, 0xac, 0xa3, 0xaa, 0xa3, 0xaa, 0xa3, 0x80, 0xa5, 0x80, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x80, 0xa3, 0x80, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0xac, 0xa5, 0xac, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0xac, 0xa3, 0xac, 0xa3, 0xa6, 0xa3, 0xa6, 0xa3, 0x98, 0xa9, 0x98, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x98, 0xcf, 0x98, 0xcf, 0xaa, 0xcf, 0xaa, 0xcf, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xcf, 0xac, 0xcf, 0xaa, 0xcf, 0xaa, 0xcf, 0x98, 0xa5, 0x98, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0x98, 0xcf, 0x98, 0xcf, 0xa6, 0xcf, 0xa6, 0xcf, 0xac, 0xa5, 0xac, 0xa5, 0xa6, 0xa5, 0xa6, 0xa5, 0xac, 0xcf, 0xac, 0xcf, 0xa6, 0xcf, 0xa6, 0xcf, 0x80, 0x91, 0x80, 0x91, 0x92, 0x91, 0x92, 0x91, 0x80, 0xbb, 0x80, 0xbb, 0x92, 0xbb, 0x92, 0xbb, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xbb, 0x94, 0xbb, 0x92, 0xbb, 0x92, 0xbb, 0x80, 0xbd, 0x80, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x80, 0xbb, 0x80, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x94, 0xbd, 0x94, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x94, 0xbb, 0x94, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x98, 0x91, 0x98, 0x91, 0x92, 0x91, 0x92, 0x91, 0x98, 0xcf, 0x98, 0xcf, 0x92, 0xcf, 0x92, 0xcf, 0x94, 0x91, 0x94, 0x91, 0x92, 0x91, 0x92, 0x91, 0x94, 0xcf, 0x94, 0xcf, 0x92, 0xcf, 0x92, 0xcf, 0x98, 0xbd, 0x98, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x98, 0xcf, 0x98, 0xcf, 0xbe, 0xcf, 0xbe, 0xcf, 0x94, 0xbd, 0x94, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x94, 0xcf, 0x94, 0xcf, 0xbe, 0xcf, 0xbe, 0xcf, 0x80, 0xa9, 0x80, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x80, 0xbb, 0x80, 0xbb, 0xaa, 0xbb, 0xaa, 0xbb, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xbb, 0xac, 0xbb, 0xaa, 0xbb, 0xaa, 0xbb, 0x80, 0xbd, 0x80, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x80, 0xbb, 0x80, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0xac, 0xbd, 0xac, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0xac, 0xbb, 0xac, 0xbb, 0xbe, 0xbb, 0xbe, 0xbb, 0x98, 0xa9, 0x98, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0x98, 0xcf, 0x98, 0xcf, 0xaa, 0xcf, 0xaa, 0xcf, 0xac, 0xa9, 0xac, 0xa9, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xcf, 0xac, 0xcf, 0xaa, 0xcf, 0xaa, 0xcf, 0x98, 0xbd, 0x98, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0x98, 0xcf, 0x98, 0xcf, 0xbe, 0xcf, 0xbe, 0xcf, 0xac, 0xbd, 0xac, 0xbd, 0xbe, 0xbd, 0xbe, 0xbd, 0xac, 0xcf, 0xac, 0xcf, 0xbe, 0xcf, 0xbe, 0xcf}; /** * @brief Returns a 32-bit mask where 1 means 3-byte code length and 0 means 2-byte * code length, given an input mask of up to 96 bits. 
* * Implemented by doing 8 consecutive lookups, building the result 4-bit at a time */ inline __device__ uint32_t get_len3_mask(uint32_t v0, uint32_t v1, uint32_t v2) { uint32_t m, v, m4, n; v = v0; m4 = k_len3lut[v & 0x3ff]; m = m4 & 0xf; n = m4 >> 4; // 8..12 v = v0 >> n; m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 4; n += m4 >> 4; // 16..24 v = __funnelshift_r(v0, v1, n); m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 8; n += m4 >> 4; // 24..36 v >>= (m4 >> 4); m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 12; n = (n + (m4 >> 4)) & 0x1f; // (32..48) % 32 = 0..16 v1 = __funnelshift_r(v1, v2, n); v2 >>= n; v = v1; m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 16; n = m4 >> 4; // 8..12 v = v1 >> n; m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 20; n += m4 >> 4; // 16..24 v = __funnelshift_r(v1, v2, n); m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 24; n += m4 >> 4; // 24..36 v >>= (m4 >> 4); m4 = k_len3lut[v & 0x3ff]; m |= (m4 & 0xf) << 28; return m; } /** * @brief Returns a 32-bit mask where each 2-bit pair contains the symbol length * minus 2, given two input masks each containing bit0 or bit1 of the corresponding * code length minus 2 for up to 32 bytes */ inline __device__ uint32_t get_len5_mask(uint32_t v0, uint32_t v1) { uint32_t m; m = (v1 & 1) * 2 + (v0 & 1); v0 >>= (m + 2); v1 >>= (m + 1); for (uint32_t i = 1; i < 16; i++) { uint32_t m2 = (v1 & 2) | (v0 & 1); uint32_t n = m2 + 2; m |= m2 << (i * 2); v0 >>= n; v1 >>= n; } return m; } /** * @brief decode symbols and output LZ77 batches (single-warp) * * @param s decompression state * @param t warp lane id */ __device__ void snappy_decode_symbols(unsnap_state_s* s, uint32_t t) { uint32_t cur = 0; auto end = static_cast<uint32_t>(s->end - s->base); uint32_t bytes_left = s->uncompressed_size; uint32_t dst_pos = 0; int32_t batch = 0; for (;;) { int32_t batch_len; volatile unsnap_batch_s* b; // Wait for prefetcher if (t == 0) { s->q.prefetch_rdpos = cur; #pragma unroll(1) // We don't want unrolling here while (s->q.prefetch_wrpos < min(cur + 5 * batch_size, end)) { busy_wait(10); } b = &s->q.batch[batch * batch_size]; } // Process small symbols in parallel: for data that does not get good compression, // the stream will consist of a large number of short literals (1-byte or 2-byte) // followed by short repeat runs. This results in many 2-byte or 3-byte symbols // that can all be decoded in parallel once we know the symbol length. { uint32_t v0, v1, v2, len3_mask, cur_t, is_long_sym, short_sym_mask; uint32_t b0; cur = shuffle(cur); cur_t = cur + t; b0 = byte_access(s, cur_t); v0 = ballot((b0 == 4) || (b0 & 2)); b0 = byte_access(s, cur_t + 32); v1 = ballot((b0 == 4) || (b0 & 2)); b0 = byte_access(s, cur_t + 64); v2 = ballot((b0 == 4) || (b0 & 2)); len3_mask = shuffle((t == 0) ? get_len3_mask(v0, v1, v2) : 0); cur_t = cur + 2 * t + __popc(len3_mask & ((1 << t) - 1)); b0 = byte_access(s, cur_t); is_long_sym = ((b0 & ~4) != 0) && (((b0 + 1) & 2) == 0); short_sym_mask = ballot(is_long_sym); batch_len = 0; b = reinterpret_cast<volatile unsnap_batch_s*>(shuffle(reinterpret_cast<uintptr_t>(b))); if (!(short_sym_mask & 1)) { batch_len = shuffle((t == 0) ? (short_sym_mask) ? __ffs(short_sym_mask) - 1 : 32 : 0); if (batch_len != 0) { uint32_t blen = 0; int32_t ofs = 0; if (t < batch_len) { blen = (b0 & 1) ? ((b0 >> 2) & 7) + 4 : ((b0 >> 2) + 1); ofs = (b0 & 1) ? ((b0 & 0xe0) << 3) | byte_access(s, cur_t + 1) : (b0 & 2) ? 
byte_access(s, cur_t + 1) | (byte_access(s, cur_t + 2) << 8) : -(int32_t)(cur_t + 1); b[t].len = blen; b[t].offset = ofs; ofs += blen; // for correct out-of-range detection below } blen = WarpReducePos32(blen, t); bytes_left = shuffle(bytes_left); dst_pos = shuffle(dst_pos); short_sym_mask = __ffs(ballot(blen > bytes_left || ofs > (int32_t)(dst_pos + blen))); if (short_sym_mask != 0) { batch_len = min(batch_len, short_sym_mask - 1); } if (batch_len != 0) { blen = shuffle(blen, batch_len - 1); cur = shuffle(cur_t, batch_len - 1) + 2 + ((len3_mask >> (batch_len - 1)) & 1); if (t == 0) { dst_pos += blen; bytes_left -= blen; } } } } // Check if the batch was stopped by a 3-byte or 4-byte literal if (batch_len < batch_size - 2 && shuffle(b0 & ~4, batch_len) == 8) { // If so, run a slower version of the above that can also handle 3/4-byte literal sequences uint32_t batch_add; do { uint32_t clen, mask_t; cur_t = cur + t; b0 = byte_access(s, cur_t); clen = (b0 & 3) ? (b0 & 2) ? 1 : 0 : (b0 >> 2); // symbol length minus 2 v0 = ballot(clen & 1); v1 = ballot((clen >> 1) & 1); len3_mask = shuffle((t == 0) ? get_len5_mask(v0, v1) : 0); mask_t = (1 << (2 * t)) - 1; cur_t = cur + 2 * t + 2 * __popc((len3_mask & 0xaaaa'aaaa) & mask_t) + __popc((len3_mask & 0x5555'5555) & mask_t); b0 = byte_access(s, cur_t); is_long_sym = ((b0 & 3) ? ((b0 & 3) == 3) : (b0 > 3 * 4)) || (cur_t >= cur + 32) || (batch_len + t >= batch_size); batch_add = __ffs(ballot(is_long_sym)) - 1; if (batch_add != 0) { uint32_t blen = 0; int32_t ofs = 0; if (t < batch_add) { blen = (b0 & 1) ? ((b0 >> 2) & 7) + 4 : ((b0 >> 2) + 1); ofs = (b0 & 1) ? ((b0 & 0xe0) << 3) | byte_access(s, cur_t + 1) : (b0 & 2) ? byte_access(s, cur_t + 1) | (byte_access(s, cur_t + 2) << 8) : -(int32_t)(cur_t + 1); b[batch_len + t].len = blen; b[batch_len + t].offset = ofs; ofs += blen; // for correct out-of-range detection below } blen = WarpReducePos32(blen, t); bytes_left = shuffle(bytes_left); dst_pos = shuffle(dst_pos); short_sym_mask = __ffs(ballot(blen > bytes_left || ofs > (int32_t)(dst_pos + blen))); if (short_sym_mask != 0) { batch_add = min(batch_add, short_sym_mask - 1); } if (batch_add != 0) { blen = shuffle(blen, batch_add - 1); cur = shuffle(cur_t, batch_add - 1) + 2 + ((len3_mask >> ((batch_add - 1) * 2)) & 3); if (t == 0) { dst_pos += blen; bytes_left -= blen; } batch_len += batch_add; } } } while (batch_add >= 6 && batch_len < batch_size - 2); } } if (t == 0) { while (bytes_left > 0 && batch_len < batch_size) { uint32_t blen, offset; uint8_t b0 = byte_access(s, cur); if (b0 & 3) { uint8_t b1 = byte_access(s, cur + 1); if (!(b0 & 2)) { // xxxxxx01.oooooooo: copy with 3-bit length, 11-bit offset offset = ((b0 & 0xe0) << 3) | b1; blen = ((b0 >> 2) & 7) + 4; cur += 2; } else { // xxxxxx1x: copy with 6-bit length, 2-byte or 4-byte offset offset = b1 | (byte_access(s, cur + 2) << 8); if (b0 & 1) // 4-byte offset { offset |= (byte_access(s, cur + 3) << 16) | (byte_access(s, cur + 4) << 24); cur += 5; } else { cur += 3; } blen = (b0 >> 2) + 1; } dst_pos += blen; if (offset - 1u >= dst_pos || bytes_left < blen) break; bytes_left -= blen; } else if (b0 < 4 * 4) { // 0000xx00: short literal blen = (b0 >> 2) + 1; offset = -(int32_t)(cur + 1); cur += 1 + blen; dst_pos += blen; if (bytes_left < blen) break; bytes_left -= blen; } else { // xxxxxx00: literal blen = b0 >> 2; if (blen >= 60) { uint32_t num_bytes = blen - 59; blen = byte_access(s, cur + 1); if (num_bytes > 1) { blen |= byte_access(s, cur + 2) << 8; if (num_bytes > 2) { blen |= byte_access(s, cur + 
3) << 16; if (num_bytes > 3) { blen |= byte_access(s, cur + 4) << 24; } } } cur += num_bytes; } cur += 1; blen += 1; offset = -(int32_t)cur; cur += blen; // Wait for prefetcher s->q.prefetch_rdpos = cur; #pragma unroll(1) // We don't want unrolling here while (s->q.prefetch_wrpos < min(cur + 5 * batch_size, end)) { busy_wait(10); } dst_pos += blen; if (bytes_left < blen) break; bytes_left -= blen; } b[batch_len].len = blen; b[batch_len].offset = offset; batch_len++; } if (batch_len != 0) { s->q.batch_len[batch] = batch_len; batch = (batch + 1) & (batch_count - 1); } } batch_len = shuffle(batch_len); if (t == 0) { while (s->q.batch_len[batch] != 0) { busy_wait(20); } } if (batch_len != batch_size) { break; } } if (!t) { s->q.prefetch_end = 1; s->q.batch_len[batch] = -1; s->bytes_left = bytes_left; if (bytes_left != 0) { s->error = -2; } } } /** * @brief process LZ77 symbols and output uncompressed stream * * @param s decompression state * @param t thread id within participating group (lane id) * @param temp_storage temporary storage used by the algorithm * * NOTE: No error checks at this stage (WARP0 responsible for not sending offsets and lengths that *would result in out-of-bounds accesses) */ template <typename Storage> __device__ void snappy_process_symbols(unsnap_state_s* s, int t, Storage& temp_storage) { auto const literal_base = s->base; auto out = s->dst.data(); int batch = 0; do { volatile unsnap_batch_s* b = &s->q.batch[batch * batch_size]; int32_t batch_len, blen_t, dist_t; if (t == 0) { while ((batch_len = s->q.batch_len[batch]) == 0) { busy_wait(20); } } else { batch_len = 0; } batch_len = shuffle(batch_len); if (batch_len <= 0) { break; } if (t < batch_len) { blen_t = b[t].len; dist_t = b[t].offset; } else { blen_t = dist_t = 0; } // Try to combine as many small entries as possible, but try to avoid doing that // if we see a small repeat distance 8 bytes or less if (shuffle(min((uint32_t)dist_t, (uint32_t)shuffle_xor(dist_t, 1))) > 8) { uint32_t n; do { uint32_t bofs = WarpReducePos32(blen_t, t); uint32_t stop_mask = ballot((uint32_t)dist_t < bofs); uint32_t start_mask = cub::WarpReduce<uint32_t>(temp_storage).Sum((bofs < 32 && t < batch_len) ? 1 << bofs : 0); start_mask = shuffle(start_mask); n = min(min((uint32_t)__popc(start_mask), (uint32_t)(__ffs(stop_mask) - 1u)), (uint32_t)batch_len); if (n != 0) { uint32_t it = __popc(start_mask & ((2 << t) - 1)); uint32_t tr = t - shuffle(bofs - blen_t, it); int32_t dist = shuffle(dist_t, it); if (it < n) { uint8_t const* src = (dist > 0) ? (out + t - dist) : (literal_base + tr - dist); out[t] = *src; } out += shuffle(bofs, n - 1); blen_t = shuffle(blen_t, (n + t) & 0x1f); dist_t = shuffle(dist_t, (n + t) & 0x1f); batch_len -= n; } } while (n >= 4); } for (int i = 0; i < batch_len; i++) { int32_t blen = shuffle(blen_t, i); int32_t dist = shuffle(dist_t, i); int32_t blen2 = (i + 1 < batch_len) ? shuffle(blen_t, i + 1) : 32; // Try to combine consecutive small entries if they are independent if ((uint32_t)dist >= (uint32_t)blen && blen + blen2 <= 32) { int32_t dist2 = shuffle(dist_t, i + 1); if ((uint32_t)dist2 >= (uint32_t)(blen + blen2)) { int32_t d; if (t < blen) { d = dist; } else { dist = dist2; d = (dist2 <= 0) ? dist2 + blen : dist2; } blen += blen2; if (t < blen) { uint8_t const* src = (dist > 0) ? (out - d) : (literal_base - d); out[t] = src[t]; } out += blen; i++; continue; } } if (dist > 0) { // Copy uint8_t b0, b1; if (t < blen) { uint32_t pos = t; uint8_t const* src = out + ((pos >= dist) ? 
(pos % dist) : pos) - dist; b0 = *src; } if (32 + t < blen) { uint32_t pos = 32 + t; uint8_t const* src = out + ((pos >= dist) ? (pos % dist) : pos) - dist; b1 = *src; } if (t < blen) { out[t] = b0; } if (32 + t < blen) { out[32 + t] = b1; } } else { // Literal uint8_t b0, b1; dist = -dist; while (blen >= 64) { b0 = literal_base[dist + t]; b1 = literal_base[dist + 32 + t]; out[t] = b0; out[32 + t] = b1; dist += 64; out += 64; blen -= 64; } if (t < blen) { b0 = literal_base[dist + t]; } if (32 + t < blen) { b1 = literal_base[dist + 32 + t]; } if (t < blen) { out[t] = b0; } if (32 + t < blen) { out[32 + t] = b1; } } out += blen; } __syncwarp(); if (t == 0) { s->q.batch_len[batch] = 0; } batch = (batch + 1) & (batch_count - 1); } while (true); } /** * @brief Snappy decompression kernel * See http://github.com/google/snappy/blob/master/format_description.txt * * blockDim {128,1,1} * * @param[in] inputs Source & destination information per block * @param[out] outputs Decompression status per block */ template <int block_size> __global__ void __launch_bounds__(block_size) unsnap_kernel(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results) { __shared__ __align__(16) unsnap_state_s state_g; __shared__ cub::WarpReduce<uint32_t>::TempStorage temp_storage; int t = threadIdx.x; unsnap_state_s* s = &state_g; int strm_id = blockIdx.x; if (t < batch_count) { s->q.batch_len[t] = 0; } __syncthreads(); if (!t) { s->src = inputs[strm_id]; s->dst = outputs[strm_id]; auto cur = s->src.begin(); auto const end = s->src.end(); s->error = 0; if (log_cyclecount) { s->tstart = clock(); } if (cur < end) { // Read uncompressed size (varint), limited to 32-bit uint32_t uncompressed_size = *cur++; if (uncompressed_size > 0x7f) { uint32_t c = (cur < end) ? *cur++ : 0; uncompressed_size = (uncompressed_size & 0x7f) | (c << 7); if (uncompressed_size >= (0x80 << 7)) { c = (cur < end) ? *cur++ : 0; uncompressed_size = (uncompressed_size & ((0x7f << 7) | 0x7f)) | (c << 14); if (uncompressed_size >= (0x80 << 14)) { c = (cur < end) ? *cur++ : 0; uncompressed_size = (uncompressed_size & ((0x7f << 14) | (0x7f << 7) | 0x7f)) | (c << 21); if (uncompressed_size >= (0x80 << 21)) { c = (cur < end) ? *cur++ : 0; if (c < 0x8) uncompressed_size = (uncompressed_size & ((0x7f << 21) | (0x7f << 14) | (0x7f << 7) | 0x7f)) | (c << 28); else s->error = -1; } } } } s->uncompressed_size = uncompressed_size; s->bytes_left = uncompressed_size; s->base = cur; s->end = end; if ((cur >= end && uncompressed_size != 0) || (uncompressed_size > s->dst.size())) { s->error = -1; } } else { s->error = -1; } s->q.prefetch_end = 0; s->q.prefetch_wrpos = 0; s->q.prefetch_rdpos = 0; } __syncthreads(); if (!s->error) { if (t < 32) { // WARP0: decode lengths and offsets snappy_decode_symbols(s, t); } else if (t < 64) { // WARP1: prefetch byte stream for WARP0 snappy_prefetch_bytestream(s, t & 0x1f); } else if (t < 96) { // WARP2: LZ77 snappy_process_symbols(s, t & 0x1f, temp_storage); } __syncthreads(); } if (!t) { results[strm_id].bytes_written = s->uncompressed_size - s->bytes_left; results[strm_id].status = (s->error == 0) ? 
compression_status::SUCCESS : compression_status::FAILURE; if (log_cyclecount) { results[strm_id].reserved = clock() - s->tstart; } else { results[strm_id].reserved = 0; } } } void gpu_unsnap(device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, rmm::cuda_stream_view stream) { dim3 dim_block(128, 1); // 4 warps per stream, 1 stream per block dim3 dim_grid(inputs.size(), 1); // TODO: Check max grid dimensions vs max expected count unsnap_kernel<128><<<dim_grid, dim_block, 0, stream.value()>>>(inputs, outputs, results); } } // namespace io } // namespace cudf
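// ----------------------------------------------------------------------------
// Reference sketch of the snappy preamble decode, assuming the same layout the
// kernel above parses in thread 0: the uncompressed length is stored as a
// little-endian base-128 varint, 7 payload bits per byte, with a set high bit
// meaning another byte follows. The function name is hypothetical; the logic
// mirrors the kernel, including its 32-bit limit (a 5th byte >= 0x8, i.e. a
// length >= 2^31, is rejected).
[[maybe_unused]] static bool snappy_uncompressed_length(uint8_t const* cur,
                                                        uint8_t const* end,
                                                        uint32_t* size)
{
  uint32_t value = 0;
  for (int shift = 0; shift <= 28; shift += 7) {
    if (cur >= end) { return false; }               // truncated varint
    uint32_t const b = *cur++;
    if (shift == 28 && b >= 0x8) { return false; }  // would exceed the kernel's limit
    value |= (b & 0x7f) << shift;
    if (b < 0x80) {                                 // high bit clear: last byte
      *size = value;
      return true;
    }
  }
  return false;  // not reached: the 5th byte always returns above
}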
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/comp/brotli_dict.cpp
/* * Copyright (c) 2018-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Portions of this file are derived from Google's Brotli project at * https://github.com/google/brotli, original license text below. */ /* Copyright 2013 Google Inc. All Rights Reserved. Distributed under MIT license. See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ /* Copyright(c) 2009, 2010, 2013 - 2016 by the Brotli Authors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include "brotli_dict.hpp" #include <cstdint> namespace cudf { namespace io { static const brotli_dictionary_s g_dictionary = { // size_bits_by_length {0, 0, 0, 0, 10, 10, 11, 11, 10, 10, 10, 10, 10, 9, 9, 8, 7, 7, 8, 7, 7, 6, 6, 5, 5, 0, 0, 0, 0, 0, 0, 0}, // offsets_by_length {0, 0, 0, 0, 0, 4096, 9216, 21504, 35840, 44032, 53248, 63488, 74752, 87040, 93696, 100864, 104704, 106752, 108928, 113536, 115968, 118528, 119872, 121280, 122016, 122784, 122784, 122784, 122784, 122784, 122784, 122784}, // dictionary data {116, 105, 109, 101, 100, 111, 119, 110, 108, 105, 102, 101, 108, 101, 102, 116, 98, 97, 99, 107, 99, 111, 100, 101, 100, 97, 116, 97, 115, 104, 111, 119, 111, 110, 108, 121, 115, 105, 116, 101, 99, 105, 116, 121, 111, 112, 101, 110, 106, 117, 115, 116, 108, 105, 107, 101, 102, 114, 101, 101, 119, 111, 114, 107, 116, 101, 120, 116, 121, 101, 97, 114, 111, 118, 101, 114, 98, 111, 100, 121, 108, 111, 118, 101, 102, 111, 114, 109, 98, 111, 111, 107, 112, 108, 97, 121, 108, 105, 118, 101, 108, 105, 110, 101, 104, 101, 108, 112, 104, 111, 109, 101, 115, 105, 100, 101, 109, 111, 114, 101, 119, 111, 114, 100, 108, 111, 110, 103, 116, 104, 101, 109, 118, 105, 101, 119, 102, 105, 110, 100, 112, 97, 103, 101, 100, 97, 121, 115, 102, 117, 108, 108, 104, 101, 97, 100, 116, 101, 114, 109, 101, 97, 99, 104, 97, 114, 101, 97, 102, 114, 111, 109, 116, 114, 117, 101, 109, 97, 114, 107, 97, 98, 108, 101, 117, 112, 111, 110, 104, 105, 103, 104, 100, 97, 116, 101, 108, 97, 110, 100, 110, 101, 119, 115, 101, 118, 101, 110, 110, 101, 120, 116, 99, 97, 115, 101, 98, 111, 116, 104, 112, 111, 115, 116, 117, 115, 101, 100, 109, 97, 100, 101, 104, 97, 110, 100, 104, 101, 114, 101, 119, 104, 97, 116, 110, 97, 109, 101, 76, 105, 110, 107, 98, 108, 111, 103, 115, 105, 122, 101, 98, 97, 115, 101, 104, 101, 108, 100, 109, 97, 107, 101, 109, 97, 105, 110, 117, 115, 101, 114, 39, 41, 32, 43, 104, 111, 108, 100, 101, 110, 100, 115, 119, 105, 116, 104, 78, 101, 119, 115, 114, 101, 97, 100, 119, 101, 114, 101, 115, 105, 103, 110, 116, 97, 107, 101, 104, 97, 118, 101, 103, 97, 109, 101, 115, 101, 101, 110, 99, 97, 108, 108, 112, 97, 116, 104, 119, 101, 108, 108, 112, 108, 117, 115, 109, 101, 110, 117, 102, 105, 108, 109, 112, 97, 114, 116, 106, 111, 105, 110, 116, 104, 105, 115, 108, 105, 115, 116, 103, 111, 111, 100, 110, 101, 101, 100, 119, 97, 121, 115, 119, 101, 115, 116, 106, 111, 98, 115, 109, 105, 110, 100, 97, 108, 115, 111, 108, 111, 103, 111, 114, 105, 99, 104, 117, 115, 101, 115, 108, 97, 115, 116, 116, 101, 97, 109, 97, 114, 109, 121, 102, 111, 111, 100, 107, 105, 110, 103, 119, 105, 108, 108, 101, 97, 115, 116, 119, 97, 114, 100, 98, 101, 115, 116, 102, 105, 114, 101, 80, 97, 103, 101, 107, 110, 111, 119, 97, 119, 97, 121, 46, 112, 110, 103, 109, 111, 118, 101, 116, 104, 97, 110, 108, 111, 97, 100, 103, 105, 118, 101, 115, 101, 108, 102, 110, 111, 116, 101, 109, 117, 99, 104, 102, 101, 101, 100, 109, 97, 110, 121, 114, 111, 99, 107, 105, 99, 111, 110, 111, 110, 99, 101, 108, 111, 111, 107, 104, 105, 100, 101, 100, 105, 101, 100, 72, 111, 109, 101, 114, 117, 108, 101, 104, 111, 115, 116, 97, 106, 97, 120, 105, 110, 102, 111, 99, 108, 117, 98, 108, 97, 119, 115, 108, 101, 115, 115, 104, 97, 108, 102, 115, 111, 109, 101, 115, 117, 99, 104, 122, 111, 110, 101, 49, 48, 48, 37, 111, 110, 101, 115, 99, 97, 114, 101, 84, 105, 109, 101, 114, 97, 99, 101, 98, 108, 117, 101, 102, 111, 117, 114, 119, 101, 101, 107, 102, 97, 99, 101, 104, 111, 112, 101, 103, 97, 118, 101, 104, 97, 114, 100, 108, 111, 115, 116, 119, 
104, 101, 110, 112, 97, 114, 107, 107, 101, 112, 116, 112, 97, 115, 115, 115, 104, 105, 112, 114, 111, 111, 109, 72, 84, 77, 76, 112, 108, 97, 110, 84, 121, 112, 101, 100, 111, 110, 101, 115, 97, 118, 101, 107, 101, 101, 112, 102, 108, 97, 103, 108, 105, 110, 107, 115, 111, 108, 100, 102, 105, 118, 101, 116, 111, 111, 107, 114, 97, 116, 101, 116, 111, 119, 110, 106, 117, 109, 112, 116, 104, 117, 115, 100, 97, 114, 107, 99, 97, 114, 100, 102, 105, 108, 101, 102, 101, 97, 114, 115, 116, 97, 121, 107, 105, 108, 108, 116, 104, 97, 116, 102, 97, 108, 108, 97, 117, 116, 111, 101, 118, 101, 114, 46, 99, 111, 109, 116, 97, 108, 107, 115, 104, 111, 112, 118, 111, 116, 101, 100, 101, 101, 112, 109, 111, 100, 101, 114, 101, 115, 116, 116, 117, 114, 110, 98, 111, 114, 110, 98, 97, 110, 100, 102, 101, 108, 108, 114, 111, 115, 101, 117, 114, 108, 40, 115, 107, 105, 110, 114, 111, 108, 101, 99, 111, 109, 101, 97, 99, 116, 115, 97, 103, 101, 115, 109, 101, 101, 116, 103, 111, 108, 100, 46, 106, 112, 103, 105, 116, 101, 109, 118, 97, 114, 121, 102, 101, 108, 116, 116, 104, 101, 110, 115, 101, 110, 100, 100, 114, 111, 112, 86, 105, 101, 119, 99, 111, 112, 121, 49, 46, 48, 34, 60, 47, 97, 62, 115, 116, 111, 112, 101, 108, 115, 101, 108, 105, 101, 115, 116, 111, 117, 114, 112, 97, 99, 107, 46, 103, 105, 102, 112, 97, 115, 116, 99, 115, 115, 63, 103, 114, 97, 121, 109, 101, 97, 110, 38, 103, 116, 59, 114, 105, 100, 101, 115, 104, 111, 116, 108, 97, 116, 101, 115, 97, 105, 100, 114, 111, 97, 100, 118, 97, 114, 32, 102, 101, 101, 108, 106, 111, 104, 110, 114, 105, 99, 107, 112, 111, 114, 116, 102, 97, 115, 116, 39, 85, 65, 45, 100, 101, 97, 100, 60, 47, 98, 62, 112, 111, 111, 114, 98, 105, 108, 108, 116, 121, 112, 101, 85, 46, 83, 46, 119, 111, 111, 100, 109, 117, 115, 116, 50, 112, 120, 59, 73, 110, 102, 111, 114, 97, 110, 107, 119, 105, 100, 101, 119, 97, 110, 116, 119, 97, 108, 108, 108, 101, 97, 100, 91, 48, 93, 59, 112, 97, 117, 108, 119, 97, 118, 101, 115, 117, 114, 101, 36, 40, 39, 35, 119, 97, 105, 116, 109, 97, 115, 115, 97, 114, 109, 115, 103, 111, 101, 115, 103, 97, 105, 110, 108, 97, 110, 103, 112, 97, 105, 100, 33, 45, 45, 32, 108, 111, 99, 107, 117, 110, 105, 116, 114, 111, 111, 116, 119, 97, 108, 107, 102, 105, 114, 109, 119, 105, 102, 101, 120, 109, 108, 34, 115, 111, 110, 103, 116, 101, 115, 116, 50, 48, 112, 120, 107, 105, 110, 100, 114, 111, 119, 115, 116, 111, 111, 108, 102, 111, 110, 116, 109, 97, 105, 108, 115, 97, 102, 101, 115, 116, 97, 114, 109, 97, 112, 115, 99, 111, 114, 101, 114, 97, 105, 110, 102, 108, 111, 119, 98, 97, 98, 121, 115, 112, 97, 110, 115, 97, 121, 115, 52, 112, 120, 59, 54, 112, 120, 59, 97, 114, 116, 115, 102, 111, 111, 116, 114, 101, 97, 108, 119, 105, 107, 105, 104, 101, 97, 116, 115, 116, 101, 112, 116, 114, 105, 112, 111, 114, 103, 47, 108, 97, 107, 101, 119, 101, 97, 107, 116, 111, 108, 100, 70, 111, 114, 109, 99, 97, 115, 116, 102, 97, 110, 115, 98, 97, 110, 107, 118, 101, 114, 121, 114, 117, 110, 115, 106, 117, 108, 121, 116, 97, 115, 107, 49, 112, 120, 59, 103, 111, 97, 108, 103, 114, 101, 119, 115, 108, 111, 119, 101, 100, 103, 101, 105, 100, 61, 34, 115, 101, 116, 115, 53, 112, 120, 59, 46, 106, 115, 63, 52, 48, 112, 120, 105, 102, 32, 40, 115, 111, 111, 110, 115, 101, 97, 116, 110, 111, 110, 101, 116, 117, 98, 101, 122, 101, 114, 111, 115, 101, 110, 116, 114, 101, 101, 100, 102, 97, 99, 116, 105, 110, 116, 111, 103, 105, 102, 116, 104, 97, 114, 109, 49, 56, 112, 120, 99, 97, 109, 101, 104, 105, 108, 108, 98, 111, 108, 100, 122, 111, 111, 109, 118, 111, 105, 
100, 101, 97, 115, 121, 114, 105, 110, 103, 102, 105, 108, 108, 112, 101, 97, 107, 105, 110, 105, 116, 99, 111, 115, 116, 51, 112, 120, 59, 106, 97, 99, 107, 116, 97, 103, 115, 98, 105, 116, 115, 114, 111, 108, 108, 101, 100, 105, 116, 107, 110, 101, 119, 110, 101, 97, 114, 60, 33, 45, 45, 103, 114, 111, 119, 74, 83, 79, 78, 100, 117, 116, 121, 78, 97, 109, 101, 115, 97, 108, 101, 121, 111, 117, 32, 108, 111, 116, 115, 112, 97, 105, 110, 106, 97, 122, 122, 99, 111, 108, 100, 101, 121, 101, 115, 102, 105, 115, 104, 119, 119, 119, 46, 114, 105, 115, 107, 116, 97, 98, 115, 112, 114, 101, 118, 49, 48, 112, 120, 114, 105, 115, 101, 50, 53, 112, 120, 66, 108, 117, 101, 100, 105, 110, 103, 51, 48, 48, 44, 98, 97, 108, 108, 102, 111, 114, 100, 101, 97, 114, 110, 119, 105, 108, 100, 98, 111, 120, 46, 102, 97, 105, 114, 108, 97, 99, 107, 118, 101, 114, 115, 112, 97, 105, 114, 106, 117, 110, 101, 116, 101, 99, 104, 105, 102, 40, 33, 112, 105, 99, 107, 101, 118, 105, 108, 36, 40, 34, 35, 119, 97, 114, 109, 108, 111, 114, 100, 100, 111, 101, 115, 112, 117, 108, 108, 44, 48, 48, 48, 105, 100, 101, 97, 100, 114, 97, 119, 104, 117, 103, 101, 115, 112, 111, 116, 102, 117, 110, 100, 98, 117, 114, 110, 104, 114, 101, 102, 99, 101, 108, 108, 107, 101, 121, 115, 116, 105, 99, 107, 104, 111, 117, 114, 108, 111, 115, 115, 102, 117, 101, 108, 49, 50, 112, 120, 115, 117, 105, 116, 100, 101, 97, 108, 82, 83, 83, 34, 97, 103, 101, 100, 103, 114, 101, 121, 71, 69, 84, 34, 101, 97, 115, 101, 97, 105, 109, 115, 103, 105, 114, 108, 97, 105, 100, 115, 56, 112, 120, 59, 110, 97, 118, 121, 103, 114, 105, 100, 116, 105, 112, 115, 35, 57, 57, 57, 119, 97, 114, 115, 108, 97, 100, 121, 99, 97, 114, 115, 41, 59, 32, 125, 112, 104, 112, 63, 104, 101, 108, 108, 116, 97, 108, 108, 119, 104, 111, 109, 122, 104, 58, 229, 42, 47, 13, 10, 32, 49, 48, 48, 104, 97, 108, 108, 46, 10, 10, 65, 55, 112, 120, 59, 112, 117, 115, 104, 99, 104, 97, 116, 48, 112, 120, 59, 99, 114, 101, 119, 42, 47, 60, 47, 104, 97, 115, 104, 55, 53, 112, 120, 102, 108, 97, 116, 114, 97, 114, 101, 32, 38, 38, 32, 116, 101, 108, 108, 99, 97, 109, 112, 111, 110, 116, 111, 108, 97, 105, 100, 109, 105, 115, 115, 115, 107, 105, 112, 116, 101, 110, 116, 102, 105, 110, 101, 109, 97, 108, 101, 103, 101, 116, 115, 112, 108, 111, 116, 52, 48, 48, 44, 13, 10, 13, 10, 99, 111, 111, 108, 102, 101, 101, 116, 46, 112, 104, 112, 60, 98, 114, 62, 101, 114, 105, 99, 109, 111, 115, 116, 103, 117, 105, 100, 98, 101, 108, 108, 100, 101, 115, 99, 104, 97, 105, 114, 109, 97, 116, 104, 97, 116, 111, 109, 47, 105, 109, 103, 38, 35, 56, 50, 108, 117, 99, 107, 99, 101, 110, 116, 48, 48, 48, 59, 116, 105, 110, 121, 103, 111, 110, 101, 104, 116, 109, 108, 115, 101, 108, 108, 100, 114, 117, 103, 70, 82, 69, 69, 110, 111, 100, 101, 110, 105, 99, 107, 63, 105, 100, 61, 108, 111, 115, 101, 110, 117, 108, 108, 118, 97, 115, 116, 119, 105, 110, 100, 82, 83, 83, 32, 119, 101, 97, 114, 114, 101, 108, 121, 98, 101, 101, 110, 115, 97, 109, 101, 100, 117, 107, 101, 110, 97, 115, 97, 99, 97, 112, 101, 119, 105, 115, 104, 103, 117, 108, 102, 84, 50, 51, 58, 104, 105, 116, 115, 115, 108, 111, 116, 103, 97, 116, 101, 107, 105, 99, 107, 98, 108, 117, 114, 116, 104, 101, 121, 49, 53, 112, 120, 39, 39, 41, 59, 41, 59, 34, 62, 109, 115, 105, 101, 119, 105, 110, 115, 98, 105, 114, 100, 115, 111, 114, 116, 98, 101, 116, 97, 115, 101, 101, 107, 84, 49, 56, 58, 111, 114, 100, 115, 116, 114, 101, 101, 109, 97, 108, 108, 54, 48, 112, 120, 102, 97, 114, 109, 226, 128, 153, 115, 98, 111, 121, 115, 91, 48, 93, 46, 39, 
41, 59, 34, 80, 79, 83, 84, 98, 101, 97, 114, 107, 105, 100, 115, 41, 59, 125, 125, 109, 97, 114, 121, 116, 101, 110, 100, 40, 85, 75, 41, 113, 117, 97, 100, 122, 104, 58, 230, 45, 115, 105, 122, 45, 45, 45, 45, 112, 114, 111, 112, 39, 41, 59, 13, 108, 105, 102, 116, 84, 49, 57, 58, 118, 105, 99, 101, 97, 110, 100, 121, 100, 101, 98, 116, 62, 82, 83, 83, 112, 111, 111, 108, 110, 101, 99, 107, 98, 108, 111, 119, 84, 49, 54, 58, 100, 111, 111, 114, 101, 118, 97, 108, 84, 49, 55, 58, 108, 101, 116, 115, 102, 97, 105, 108, 111, 114, 97, 108, 112, 111, 108, 108, 110, 111, 118, 97, 99, 111, 108, 115, 103, 101, 110, 101, 32, 226, 128, 148, 115, 111, 102, 116, 114, 111, 109, 101, 116, 105, 108, 108, 114, 111, 115, 115, 60, 104, 51, 62, 112, 111, 117, 114, 102, 97, 100, 101, 112, 105, 110, 107, 60, 116, 114, 62, 109, 105, 110, 105, 41, 124, 33, 40, 109, 105, 110, 101, 122, 104, 58, 232, 98, 97, 114, 115, 104, 101, 97, 114, 48, 48, 41, 59, 109, 105, 108, 107, 32, 45, 45, 62, 105, 114, 111, 110, 102, 114, 101, 100, 100, 105, 115, 107, 119, 101, 110, 116, 115, 111, 105, 108, 112, 117, 116, 115, 47, 106, 115, 47, 104, 111, 108, 121, 84, 50, 50, 58, 73, 83, 66, 78, 84, 50, 48, 58, 97, 100, 97, 109, 115, 101, 101, 115, 60, 104, 50, 62, 106, 115, 111, 110, 39, 44, 32, 39, 99, 111, 110, 116, 84, 50, 49, 58, 32, 82, 83, 83, 108, 111, 111, 112, 97, 115, 105, 97, 109, 111, 111, 110, 60, 47, 112, 62, 115, 111, 117, 108, 76, 73, 78, 69, 102, 111, 114, 116, 99, 97, 114, 116, 84, 49, 52, 58, 60, 104, 49, 62, 56, 48, 112, 120, 33, 45, 45, 60, 57, 112, 120, 59, 84, 48, 52, 58, 109, 105, 107, 101, 58, 52, 54, 90, 110, 105, 99, 101, 105, 110, 99, 104, 89, 111, 114, 107, 114, 105, 99, 101, 122, 104, 58, 228, 39, 41, 41, 59, 112, 117, 114, 101, 109, 97, 103, 101, 112, 97, 114, 97, 116, 111, 110, 101, 98, 111, 110, 100, 58, 51, 55, 90, 95, 111, 102, 95, 39, 93, 41, 59, 48, 48, 48, 44, 122, 104, 58, 231, 116, 97, 110, 107, 121, 97, 114, 100, 98, 111, 119, 108, 98, 117, 115, 104, 58, 53, 54, 90, 74, 97, 118, 97, 51, 48, 112, 120, 10, 124, 125, 10, 37, 67, 51, 37, 58, 51, 52, 90, 106, 101, 102, 102, 69, 88, 80, 73, 99, 97, 115, 104, 118, 105, 115, 97, 103, 111, 108, 102, 115, 110, 111, 119, 122, 104, 58, 233, 113, 117, 101, 114, 46, 99, 115, 115, 115, 105, 99, 107, 109, 101, 97, 116, 109, 105, 110, 46, 98, 105, 110, 100, 100, 101, 108, 108, 104, 105, 114, 101, 112, 105, 99, 115, 114, 101, 110, 116, 58, 51, 54, 90, 72, 84, 84, 80, 45, 50, 48, 49, 102, 111, 116, 111, 119, 111, 108, 102, 69, 78, 68, 32, 120, 98, 111, 120, 58, 53, 52, 90, 66, 79, 68, 89, 100, 105, 99, 107, 59, 10, 125, 10, 101, 120, 105, 116, 58, 51, 53, 90, 118, 97, 114, 115, 98, 101, 97, 116, 39, 125, 41, 59, 100, 105, 101, 116, 57, 57, 57, 59, 97, 110, 110, 101, 125, 125, 60, 47, 91, 105, 93, 46, 76, 97, 110, 103, 107, 109, 194, 178, 119, 105, 114, 101, 116, 111, 121, 115, 97, 100, 100, 115, 115, 101, 97, 108, 97, 108, 101, 120, 59, 10, 9, 125, 101, 99, 104, 111, 110, 105, 110, 101, 46, 111, 114, 103, 48, 48, 53, 41, 116, 111, 110, 121, 106, 101, 119, 115, 115, 97, 110, 100, 108, 101, 103, 115, 114, 111, 111, 102, 48, 48, 48, 41, 32, 50, 48, 48, 119, 105, 110, 101, 103, 101, 97, 114, 100, 111, 103, 115, 98, 111, 111, 116, 103, 97, 114, 121, 99, 117, 116, 115, 116, 121, 108, 101, 116, 101, 109, 112, 116, 105, 111, 110, 46, 120, 109, 108, 99, 111, 99, 107, 103, 97, 110, 103, 36, 40, 39, 46, 53, 48, 112, 120, 80, 104, 46, 68, 109, 105, 115, 99, 97, 108, 97, 110, 108, 111, 97, 110, 100, 101, 115, 107, 109, 105, 108, 101, 114, 121, 97, 110, 117, 110, 105, 120, 
100, 105, 115, 99, 41, 59, 125, 10, 100, 117, 115, 116, 99, 108, 105, 112, 41, 46, 10, 10, 55, 48, 112, 120, 45, 50, 48, 48, 68, 86, 68, 115, 55, 93, 62, 60, 116, 97, 112, 101, 100, 101, 109, 111, 105, 43, 43, 41, 119, 97, 103, 101, 101, 117, 114, 111, 112, 104, 105, 108, 111, 112, 116, 115, 104, 111, 108, 101, 70, 65, 81, 115, 97, 115, 105, 110, 45, 50, 54, 84, 108, 97, 98, 115, 112, 101, 116, 115, 85, 82, 76, 32, 98, 117, 108, 107, 99, 111, 111, 107, 59, 125, 13, 10, 72, 69, 65, 68, 91, 48, 93, 41, 97, 98, 98, 114, 106, 117, 97, 110, 40, 49, 57, 56, 108, 101, 115, 104, 116, 119, 105, 110, 60, 47, 105, 62, 115, 111, 110, 121, 103, 117, 121, 115, 102, 117, 99, 107, 112, 105, 112, 101, 124, 45, 10, 33, 48, 48, 50, 41, 110, 100, 111, 119, 91, 49, 93, 59, 91, 93, 59, 10, 76, 111, 103, 32, 115, 97, 108, 116, 13, 10, 9, 9, 98, 97, 110, 103, 116, 114, 105, 109, 98, 97, 116, 104, 41, 123, 13, 10, 48, 48, 112, 120, 10, 125, 41, 59, 107, 111, 58, 236, 102, 101, 101, 115, 97, 100, 62, 13, 115, 58, 47, 47, 32, 91, 93, 59, 116, 111, 108, 108, 112, 108, 117, 103, 40, 41, 123, 10, 123, 13, 10, 32, 46, 106, 115, 39, 50, 48, 48, 112, 100, 117, 97, 108, 98, 111, 97, 116, 46, 74, 80, 71, 41, 59, 10, 125, 113, 117, 111, 116, 41, 59, 10, 10, 39, 41, 59, 10, 13, 10, 125, 13, 50, 48, 49, 52, 50, 48, 49, 53, 50, 48, 49, 54, 50, 48, 49, 55, 50, 48, 49, 56, 50, 48, 49, 57, 50, 48, 50, 48, 50, 48, 50, 49, 50, 48, 50, 50, 50, 48, 50, 51, 50, 48, 50, 52, 50, 48, 50, 53, 50, 48, 50, 54, 50, 48, 50, 55, 50, 48, 50, 56, 50, 48, 50, 57, 50, 48, 51, 48, 50, 48, 51, 49, 50, 48, 51, 50, 50, 48, 51, 51, 50, 48, 51, 52, 50, 48, 51, 53, 50, 48, 51, 54, 50, 48, 51, 55, 50, 48, 49, 51, 50, 48, 49, 50, 50, 48, 49, 49, 50, 48, 49, 48, 50, 48, 48, 57, 50, 48, 48, 56, 50, 48, 48, 55, 50, 48, 48, 54, 50, 48, 48, 53, 50, 48, 48, 52, 50, 48, 48, 51, 50, 48, 48, 50, 50, 48, 48, 49, 50, 48, 48, 48, 49, 57, 57, 57, 49, 57, 57, 56, 49, 57, 57, 55, 49, 57, 57, 54, 49, 57, 57, 53, 49, 57, 57, 52, 49, 57, 57, 51, 49, 57, 57, 50, 49, 57, 57, 49, 49, 57, 57, 48, 49, 57, 56, 57, 49, 57, 56, 56, 49, 57, 56, 55, 49, 57, 56, 54, 49, 57, 56, 53, 49, 57, 56, 52, 49, 57, 56, 51, 49, 57, 56, 50, 49, 57, 56, 49, 49, 57, 56, 48, 49, 57, 55, 57, 49, 57, 55, 56, 49, 57, 55, 55, 49, 57, 55, 54, 49, 57, 55, 53, 49, 57, 55, 52, 49, 57, 55, 51, 49, 57, 55, 50, 49, 57, 55, 49, 49, 57, 55, 48, 49, 57, 54, 57, 49, 57, 54, 56, 49, 57, 54, 55, 49, 57, 54, 54, 49, 57, 54, 53, 49, 57, 54, 52, 49, 57, 54, 51, 49, 57, 54, 50, 49, 57, 54, 49, 49, 57, 54, 48, 49, 57, 53, 57, 49, 57, 53, 56, 49, 57, 53, 55, 49, 57, 53, 54, 49, 57, 53, 53, 49, 57, 53, 52, 49, 57, 53, 51, 49, 57, 53, 50, 49, 57, 53, 49, 49, 57, 53, 48, 49, 48, 48, 48, 49, 48, 50, 52, 49, 51, 57, 52, 48, 48, 48, 48, 57, 57, 57, 57, 99, 111, 109, 111, 109, 195, 161, 115, 101, 115, 116, 101, 101, 115, 116, 97, 112, 101, 114, 111, 116, 111, 100, 111, 104, 97, 99, 101, 99, 97, 100, 97, 97, 195, 177, 111, 98, 105, 101, 110, 100, 195, 173, 97, 97, 115, 195, 173, 118, 105, 100, 97, 99, 97, 115, 111, 111, 116, 114, 111, 102, 111, 114, 111, 115, 111, 108, 111, 111, 116, 114, 97, 99, 117, 97, 108, 100, 105, 106, 111, 115, 105, 100, 111, 103, 114, 97, 110, 116, 105, 112, 111, 116, 101, 109, 97, 100, 101, 98, 101, 97, 108, 103, 111, 113, 117, 195, 169, 101, 115, 116, 111, 110, 97, 100, 97, 116, 114, 101, 115, 112, 111, 99, 111, 99, 97, 115, 97, 98, 97, 106, 111, 116, 111, 100, 97, 115, 105, 110, 111, 97, 103, 117, 97, 112, 117, 101, 115, 117, 110, 111, 115, 97, 110, 116, 101, 100, 105, 99, 101, 108, 117, 105, 115, 101, 
108, 108, 97, 109, 97, 121, 111, 122, 111, 110, 97, 97, 109, 111, 114, 112, 105, 115, 111, 111, 98, 114, 97, 99, 108, 105, 99, 101, 108, 108, 111, 100, 105, 111, 115, 104, 111, 114, 97, 99, 97, 115, 105, 208, 183, 208, 176, 208, 189, 208, 176, 208, 190, 208, 188, 209, 128, 208, 176, 209, 128, 209, 131, 209, 130, 208, 176, 208, 189, 208, 181, 208, 191, 208, 190, 208, 190, 209, 130, 208, 184, 208, 183, 208, 189, 208, 190, 208, 180, 208, 190, 209, 130, 208, 190, 208, 182, 208, 181, 208, 190, 208, 189, 208, 184, 209, 133, 208, 157, 208, 176, 208, 181, 208, 181, 208, 177, 209, 139, 208, 188, 209, 139, 208, 146, 209, 139, 209, 129, 208, 190, 208, 178, 209, 139, 208, 178, 208, 190, 208, 157, 208, 190, 208, 190, 208, 177, 208, 159, 208, 190, 208, 187, 208, 184, 208, 189, 208, 184, 208, 160, 208, 164, 208, 157, 208, 181, 208, 156, 209, 139, 209, 130, 209, 139, 208, 158, 208, 189, 208, 184, 208, 188, 208, 180, 208, 176, 208, 151, 208, 176, 208, 148, 208, 176, 208, 157, 209, 131, 208, 158, 208, 177, 209, 130, 208, 181, 208, 152, 208, 183, 208, 181, 208, 185, 208, 189, 209, 131, 208, 188, 208, 188, 208, 162, 209, 139, 209, 131, 208, 182, 217, 129, 217, 138, 216, 163, 217, 134, 217, 133, 216, 167, 217, 133, 216, 185, 217, 131, 217, 132, 216, 163, 217, 136, 216, 177, 216, 175, 217, 138, 216, 167, 217, 129, 217, 137, 217, 135, 217, 136, 217, 132, 217, 133, 217, 132, 217, 131, 216, 167, 217, 136, 217, 132, 217, 135, 216, 168, 216, 179, 216, 167, 217, 132, 216, 165, 217, 134, 217, 135, 217, 138, 216, 163, 217, 138, 217, 130, 216, 175, 217, 135, 217, 132, 216, 171, 217, 133, 216, 168, 217, 135, 217, 132, 217, 136, 217, 132, 217, 138, 216, 168, 217, 132, 216, 167, 217, 138, 216, 168, 217, 131, 216, 180, 217, 138, 216, 167, 217, 133, 216, 163, 217, 133, 217, 134, 216, 170, 216, 168, 217, 138, 217, 132, 217, 134, 216, 173, 216, 168, 217, 135, 217, 133, 217, 133, 216, 180, 217, 136, 216, 180, 102, 105, 114, 115, 116, 118, 105, 100, 101, 111, 108, 105, 103, 104, 116, 119, 111, 114, 108, 100, 109, 101, 100, 105, 97, 119, 104, 105, 116, 101, 99, 108, 111, 115, 101, 98, 108, 97, 99, 107, 114, 105, 103, 104, 116, 115, 109, 97, 108, 108, 98, 111, 111, 107, 115, 112, 108, 97, 99, 101, 109, 117, 115, 105, 99, 102, 105, 101, 108, 100, 111, 114, 100, 101, 114, 112, 111, 105, 110, 116, 118, 97, 108, 117, 101, 108, 101, 118, 101, 108, 116, 97, 98, 108, 101, 98, 111, 97, 114, 100, 104, 111, 117, 115, 101, 103, 114, 111, 117, 112, 119, 111, 114, 107, 115, 121, 101, 97, 114, 115, 115, 116, 97, 116, 101, 116, 111, 100, 97, 121, 119, 97, 116, 101, 114, 115, 116, 97, 114, 116, 115, 116, 121, 108, 101, 100, 101, 97, 116, 104, 112, 111, 119, 101, 114, 112, 104, 111, 110, 101, 110, 105, 103, 104, 116, 101, 114, 114, 111, 114, 105, 110, 112, 117, 116, 97, 98, 111, 117, 116, 116, 101, 114, 109, 115, 116, 105, 116, 108, 101, 116, 111, 111, 108, 115, 101, 118, 101, 110, 116, 108, 111, 99, 97, 108, 116, 105, 109, 101, 115, 108, 97, 114, 103, 101, 119, 111, 114, 100, 115, 103, 97, 109, 101, 115, 115, 104, 111, 114, 116, 115, 112, 97, 99, 101, 102, 111, 99, 117, 115, 99, 108, 101, 97, 114, 109, 111, 100, 101, 108, 98, 108, 111, 99, 107, 103, 117, 105, 100, 101, 114, 97, 100, 105, 111, 115, 104, 97, 114, 101, 119, 111, 109, 101, 110, 97, 103, 97, 105, 110, 109, 111, 110, 101, 121, 105, 109, 97, 103, 101, 110, 97, 109, 101, 115, 121, 111, 117, 110, 103, 108, 105, 110, 101, 115, 108, 97, 116, 101, 114, 99, 111, 108, 111, 114, 103, 114, 101, 101, 110, 102, 114, 111, 110, 116, 38, 97, 109, 112, 59, 119, 97, 116, 99, 104, 102, 111, 114, 99, 
101, 112, 114, 105, 99, 101, 114, 117, 108, 101, 115, 98, 101, 103, 105, 110, 97, 102, 116, 101, 114, 118, 105, 115, 105, 116, 105, 115, 115, 117, 101, 97, 114, 101, 97, 115, 98, 101, 108, 111, 119, 105, 110, 100, 101, 120, 116, 111, 116, 97, 108, 104, 111, 117, 114, 115, 108, 97, 98, 101, 108, 112, 114, 105, 110, 116, 112, 114, 101, 115, 115, 98, 117, 105, 108, 116, 108, 105, 110, 107, 115, 115, 112, 101, 101, 100, 115, 116, 117, 100, 121, 116, 114, 97, 100, 101, 102, 111, 117, 110, 100, 115, 101, 110, 115, 101, 117, 110, 100, 101, 114, 115, 104, 111, 119, 110, 102, 111, 114, 109, 115, 114, 97, 110, 103, 101, 97, 100, 100, 101, 100, 115, 116, 105, 108, 108, 109, 111, 118, 101, 100, 116, 97, 107, 101, 110, 97, 98, 111, 118, 101, 102, 108, 97, 115, 104, 102, 105, 120, 101, 100, 111, 102, 116, 101, 110, 111, 116, 104, 101, 114, 118, 105, 101, 119, 115, 99, 104, 101, 99, 107, 108, 101, 103, 97, 108, 114, 105, 118, 101, 114, 105, 116, 101, 109, 115, 113, 117, 105, 99, 107, 115, 104, 97, 112, 101, 104, 117, 109, 97, 110, 101, 120, 105, 115, 116, 103, 111, 105, 110, 103, 109, 111, 118, 105, 101, 116, 104, 105, 114, 100, 98, 97, 115, 105, 99, 112, 101, 97, 99, 101, 115, 116, 97, 103, 101, 119, 105, 100, 116, 104, 108, 111, 103, 105, 110, 105, 100, 101, 97, 115, 119, 114, 111, 116, 101, 112, 97, 103, 101, 115, 117, 115, 101, 114, 115, 100, 114, 105, 118, 101, 115, 116, 111, 114, 101, 98, 114, 101, 97, 107, 115, 111, 117, 116, 104, 118, 111, 105, 99, 101, 115, 105, 116, 101, 115, 109, 111, 110, 116, 104, 119, 104, 101, 114, 101, 98, 117, 105, 108, 100, 119, 104, 105, 99, 104, 101, 97, 114, 116, 104, 102, 111, 114, 117, 109, 116, 104, 114, 101, 101, 115, 112, 111, 114, 116, 112, 97, 114, 116, 121, 67, 108, 105, 99, 107, 108, 111, 119, 101, 114, 108, 105, 118, 101, 115, 99, 108, 97, 115, 115, 108, 97, 121, 101, 114, 101, 110, 116, 114, 121, 115, 116, 111, 114, 121, 117, 115, 97, 103, 101, 115, 111, 117, 110, 100, 99, 111, 117, 114, 116, 121, 111, 117, 114, 32, 98, 105, 114, 116, 104, 112, 111, 112, 117, 112, 116, 121, 112, 101, 115, 97, 112, 112, 108, 121, 73, 109, 97, 103, 101, 98, 101, 105, 110, 103, 117, 112, 112, 101, 114, 110, 111, 116, 101, 115, 101, 118, 101, 114, 121, 115, 104, 111, 119, 115, 109, 101, 97, 110, 115, 101, 120, 116, 114, 97, 109, 97, 116, 99, 104, 116, 114, 97, 99, 107, 107, 110, 111, 119, 110, 101, 97, 114, 108, 121, 98, 101, 103, 97, 110, 115, 117, 112, 101, 114, 112, 97, 112, 101, 114, 110, 111, 114, 116, 104, 108, 101, 97, 114, 110, 103, 105, 118, 101, 110, 110, 97, 109, 101, 100, 101, 110, 100, 101, 100, 84, 101, 114, 109, 115, 112, 97, 114, 116, 115, 71, 114, 111, 117, 112, 98, 114, 97, 110, 100, 117, 115, 105, 110, 103, 119, 111, 109, 97, 110, 102, 97, 108, 115, 101, 114, 101, 97, 100, 121, 97, 117, 100, 105, 111, 116, 97, 107, 101, 115, 119, 104, 105, 108, 101, 46, 99, 111, 109, 47, 108, 105, 118, 101, 100, 99, 97, 115, 101, 115, 100, 97, 105, 108, 121, 99, 104, 105, 108, 100, 103, 114, 101, 97, 116, 106, 117, 100, 103, 101, 116, 104, 111, 115, 101, 117, 110, 105, 116, 115, 110, 101, 118, 101, 114, 98, 114, 111, 97, 100, 99, 111, 97, 115, 116, 99, 111, 118, 101, 114, 97, 112, 112, 108, 101, 102, 105, 108, 101, 115, 99, 121, 99, 108, 101, 115, 99, 101, 110, 101, 112, 108, 97, 110, 115, 99, 108, 105, 99, 107, 119, 114, 105, 116, 101, 113, 117, 101, 101, 110, 112, 105, 101, 99, 101, 101, 109, 97, 105, 108, 102, 114, 97, 109, 101, 111, 108, 100, 101, 114, 112, 104, 111, 116, 111, 108, 105, 109, 105, 116, 99, 97, 99, 104, 101, 99, 105, 118, 105, 108, 115, 99, 97, 108, 101, 
101, 110, 116, 101, 114, 116, 104, 101, 109, 101, 116, 104, 101, 114, 101, 116, 111, 117, 99, 104, 98, 111, 117, 110, 100, 114, 111, 121, 97, 108, 97, 115, 107, 101, 100, 119, 104, 111, 108, 101, 115, 105, 110, 99, 101, 115, 116, 111, 99, 107, 32, 110, 97, 109, 101, 102, 97, 105, 116, 104, 104, 101, 97, 114, 116, 101, 109, 112, 116, 121, 111, 102, 102, 101, 114, 115, 99, 111, 112, 101, 111, 119, 110, 101, 100, 109, 105, 103, 104, 116, 97, 108, 98, 117, 109, 116, 104, 105, 110, 107, 98, 108, 111, 111, 100, 97, 114, 114, 97, 121, 109, 97, 106, 111, 114, 116, 114, 117, 115, 116, 99, 97, 110, 111, 110, 117, 110, 105, 111, 110, 99, 111, 117, 110, 116, 118, 97, 108, 105, 100, 115, 116, 111, 110, 101, 83, 116, 121, 108, 101, 76, 111, 103, 105, 110, 104, 97, 112, 112, 121, 111, 99, 99, 117, 114, 108, 101, 102, 116, 58, 102, 114, 101, 115, 104, 113, 117, 105, 116, 101, 102, 105, 108, 109, 115, 103, 114, 97, 100, 101, 110, 101, 101, 100, 115, 117, 114, 98, 97, 110, 102, 105, 103, 104, 116, 98, 97, 115, 105, 115, 104, 111, 118, 101, 114, 97, 117, 116, 111, 59, 114, 111, 117, 116, 101, 46, 104, 116, 109, 108, 109, 105, 120, 101, 100, 102, 105, 110, 97, 108, 89, 111, 117, 114, 32, 115, 108, 105, 100, 101, 116, 111, 112, 105, 99, 98, 114, 111, 119, 110, 97, 108, 111, 110, 101, 100, 114, 97, 119, 110, 115, 112, 108, 105, 116, 114, 101, 97, 99, 104, 82, 105, 103, 104, 116, 100, 97, 116, 101, 115, 109, 97, 114, 99, 104, 113, 117, 111, 116, 101, 103, 111, 111, 100, 115, 76, 105, 110, 107, 115, 100, 111, 117, 98, 116, 97, 115, 121, 110, 99, 116, 104, 117, 109, 98, 97, 108, 108, 111, 119, 99, 104, 105, 101, 102, 121, 111, 117, 116, 104, 110, 111, 118, 101, 108, 49, 48, 112, 120, 59, 115, 101, 114, 118, 101, 117, 110, 116, 105, 108, 104, 97, 110, 100, 115, 67, 104, 101, 99, 107, 83, 112, 97, 99, 101, 113, 117, 101, 114, 121, 106, 97, 109, 101, 115, 101, 113, 117, 97, 108, 116, 119, 105, 99, 101, 48, 44, 48, 48, 48, 83, 116, 97, 114, 116, 112, 97, 110, 101, 108, 115, 111, 110, 103, 115, 114, 111, 117, 110, 100, 101, 105, 103, 104, 116, 115, 104, 105, 102, 116, 119, 111, 114, 116, 104, 112, 111, 115, 116, 115, 108, 101, 97, 100, 115, 119, 101, 101, 107, 115, 97, 118, 111, 105, 100, 116, 104, 101, 115, 101, 109, 105, 108, 101, 115, 112, 108, 97, 110, 101, 115, 109, 97, 114, 116, 97, 108, 112, 104, 97, 112, 108, 97, 110, 116, 109, 97, 114, 107, 115, 114, 97, 116, 101, 115, 112, 108, 97, 121, 115, 99, 108, 97, 105, 109, 115, 97, 108, 101, 115, 116, 101, 120, 116, 115, 115, 116, 97, 114, 115, 119, 114, 111, 110, 103, 60, 47, 104, 51, 62, 116, 104, 105, 110, 103, 46, 111, 114, 103, 47, 109, 117, 108, 116, 105, 104, 101, 97, 114, 100, 80, 111, 119, 101, 114, 115, 116, 97, 110, 100, 116, 111, 107, 101, 110, 115, 111, 108, 105, 100, 40, 116, 104, 105, 115, 98, 114, 105, 110, 103, 115, 104, 105, 112, 115, 115, 116, 97, 102, 102, 116, 114, 105, 101, 100, 99, 97, 108, 108, 115, 102, 117, 108, 108, 121, 102, 97, 99, 116, 115, 97, 103, 101, 110, 116, 84, 104, 105, 115, 32, 47, 47, 45, 45, 62, 97, 100, 109, 105, 110, 101, 103, 121, 112, 116, 69, 118, 101, 110, 116, 49, 53, 112, 120, 59, 69, 109, 97, 105, 108, 116, 114, 117, 101, 34, 99, 114, 111, 115, 115, 115, 112, 101, 110, 116, 98, 108, 111, 103, 115, 98, 111, 120, 34, 62, 110, 111, 116, 101, 100, 108, 101, 97, 118, 101, 99, 104, 105, 110, 97, 115, 105, 122, 101, 115, 103, 117, 101, 115, 116, 60, 47, 104, 52, 62, 114, 111, 98, 111, 116, 104, 101, 97, 118, 121, 116, 114, 117, 101, 44, 115, 101, 118, 101, 110, 103, 114, 97, 110, 100, 99, 114, 105, 109, 101, 115, 105, 103, 
110, 115, 97, 119, 97, 114, 101, 100, 97, 110, 99, 101, 112, 104, 97, 115, 101, 62, 60, 33, 45, 45, 101, 110, 95, 85, 83, 38, 35, 51, 57, 59, 50, 48, 48, 112, 120, 95, 110, 97, 109, 101, 108, 97, 116, 105, 110, 101, 110, 106, 111, 121, 97, 106, 97, 120, 46, 97, 116, 105, 111, 110, 115, 109, 105, 116, 104, 85, 46, 83, 46, 32, 104, 111, 108, 100, 115, 112, 101, 116, 101, 114, 105, 110, 100, 105, 97, 110, 97, 118, 34, 62, 99, 104, 97, 105, 110, 115, 99, 111, 114, 101, 99, 111, 109, 101, 115, 100, 111, 105, 110, 103, 112, 114, 105, 111, 114, 83, 104, 97, 114, 101, 49, 57, 57, 48, 115, 114, 111, 109, 97, 110, 108, 105, 115, 116, 115, 106, 97, 112, 97, 110, 102, 97, 108, 108, 115, 116, 114, 105, 97, 108, 111, 119, 110, 101, 114, 97, 103, 114, 101, 101, 60, 47, 104, 50, 62, 97, 98, 117, 115, 101, 97, 108, 101, 114, 116, 111, 112, 101, 114, 97, 34, 45, 47, 47, 87, 99, 97, 114, 100, 115, 104, 105, 108, 108, 115, 116, 101, 97, 109, 115, 80, 104, 111, 116, 111, 116, 114, 117, 116, 104, 99, 108, 101, 97, 110, 46, 112, 104, 112, 63, 115, 97, 105, 110, 116, 109, 101, 116, 97, 108, 108, 111, 117, 105, 115, 109, 101, 97, 110, 116, 112, 114, 111, 111, 102, 98, 114, 105, 101, 102, 114, 111, 119, 34, 62, 103, 101, 110, 114, 101, 116, 114, 117, 99, 107, 108, 111, 111, 107, 115, 86, 97, 108, 117, 101, 70, 114, 97, 109, 101, 46, 110, 101, 116, 47, 45, 45, 62, 10, 60, 116, 114, 121, 32, 123, 10, 118, 97, 114, 32, 109, 97, 107, 101, 115, 99, 111, 115, 116, 115, 112, 108, 97, 105, 110, 97, 100, 117, 108, 116, 113, 117, 101, 115, 116, 116, 114, 97, 105, 110, 108, 97, 98, 111, 114, 104, 101, 108, 112, 115, 99, 97, 117, 115, 101, 109, 97, 103, 105, 99, 109, 111, 116, 111, 114, 116, 104, 101, 105, 114, 50, 53, 48, 112, 120, 108, 101, 97, 115, 116, 115, 116, 101, 112, 115, 67, 111, 117, 110, 116, 99, 111, 117, 108, 100, 103, 108, 97, 115, 115, 115, 105, 100, 101, 115, 102, 117, 110, 100, 115, 104, 111, 116, 101, 108, 97, 119, 97, 114, 100, 109, 111, 117, 116, 104, 109, 111, 118, 101, 115, 112, 97, 114, 105, 115, 103, 105, 118, 101, 115, 100, 117, 116, 99, 104, 116, 101, 120, 97, 115, 102, 114, 117, 105, 116, 110, 117, 108, 108, 44, 124, 124, 91, 93, 59, 116, 111, 112, 34, 62, 10, 60, 33, 45, 45, 80, 79, 83, 84, 34, 111, 99, 101, 97, 110, 60, 98, 114, 47, 62, 102, 108, 111, 111, 114, 115, 112, 101, 97, 107, 100, 101, 112, 116, 104, 32, 115, 105, 122, 101, 98, 97, 110, 107, 115, 99, 97, 116, 99, 104, 99, 104, 97, 114, 116, 50, 48, 112, 120, 59, 97, 108, 105, 103, 110, 100, 101, 97, 108, 115, 119, 111, 117, 108, 100, 53, 48, 112, 120, 59, 117, 114, 108, 61, 34, 112, 97, 114, 107, 115, 109, 111, 117, 115, 101, 77, 111, 115, 116, 32, 46, 46, 46, 60, 47, 97, 109, 111, 110, 103, 98, 114, 97, 105, 110, 98, 111, 100, 121, 32, 110, 111, 110, 101, 59, 98, 97, 115, 101, 100, 99, 97, 114, 114, 121, 100, 114, 97, 102, 116, 114, 101, 102, 101, 114, 112, 97, 103, 101, 95, 104, 111, 109, 101, 46, 109, 101, 116, 101, 114, 100, 101, 108, 97, 121, 100, 114, 101, 97, 109, 112, 114, 111, 118, 101, 106, 111, 105, 110, 116, 60, 47, 116, 114, 62, 100, 114, 117, 103, 115, 60, 33, 45, 45, 32, 97, 112, 114, 105, 108, 105, 100, 101, 97, 108, 97, 108, 108, 101, 110, 101, 120, 97, 99, 116, 102, 111, 114, 116, 104, 99, 111, 100, 101, 115, 108, 111, 103, 105, 99, 86, 105, 101, 119, 32, 115, 101, 101, 109, 115, 98, 108, 97, 110, 107, 112, 111, 114, 116, 115, 32, 40, 50, 48, 48, 115, 97, 118, 101, 100, 95, 108, 105, 110, 107, 103, 111, 97, 108, 115, 103, 114, 97, 110, 116, 103, 114, 101, 101, 107, 104, 111, 109, 101, 115, 114, 105, 110, 103, 115, 114, 
97, 116, 101, 100, 51, 48, 112, 120, 59, 119, 104, 111, 115, 101, 112, 97, 114, 115, 101, 40, 41, 59, 34, 32, 66, 108, 111, 99, 107, 108, 105, 110, 117, 120, 106, 111, 110, 101, 115, 112, 105, 120, 101, 108, 39, 41, 59, 34, 62, 41, 59, 105, 102, 40, 45, 108, 101, 102, 116, 100, 97, 118, 105, 100, 104, 111, 114, 115, 101, 70, 111, 99, 117, 115, 114, 97, 105, 115, 101, 98, 111, 120, 101, 115, 84, 114, 97, 99, 107, 101, 109, 101, 110, 116, 60, 47, 101, 109, 62, 98, 97, 114, 34, 62, 46, 115, 114, 99, 61, 116, 111, 119, 101, 114, 97, 108, 116, 61, 34, 99, 97, 98, 108, 101, 104, 101, 110, 114, 121, 50, 52, 112, 120, 59, 115, 101, 116, 117, 112, 105, 116, 97, 108, 121, 115, 104, 97, 114, 112, 109, 105, 110, 111, 114, 116, 97, 115, 116, 101, 119, 97, 110, 116, 115, 116, 104, 105, 115, 46, 114, 101, 115, 101, 116, 119, 104, 101, 101, 108, 103, 105, 114, 108, 115, 47, 99, 115, 115, 47, 49, 48, 48, 37, 59, 99, 108, 117, 98, 115, 115, 116, 117, 102, 102, 98, 105, 98, 108, 101, 118, 111, 116, 101, 115, 32, 49, 48, 48, 48, 107, 111, 114, 101, 97, 125, 41, 59, 13, 10, 98, 97, 110, 100, 115, 113, 117, 101, 117, 101, 61, 32, 123, 125, 59, 56, 48, 112, 120, 59, 99, 107, 105, 110, 103, 123, 13, 10, 9, 9, 97, 104, 101, 97, 100, 99, 108, 111, 99, 107, 105, 114, 105, 115, 104, 108, 105, 107, 101, 32, 114, 97, 116, 105, 111, 115, 116, 97, 116, 115, 70, 111, 114, 109, 34, 121, 97, 104, 111, 111, 41, 91, 48, 93, 59, 65, 98, 111, 117, 116, 102, 105, 110, 100, 115, 60, 47, 104, 49, 62, 100, 101, 98, 117, 103, 116, 97, 115, 107, 115, 85, 82, 76, 32, 61, 99, 101, 108, 108, 115, 125, 41, 40, 41, 59, 49, 50, 112, 120, 59, 112, 114, 105, 109, 101, 116, 101, 108, 108, 115, 116, 117, 114, 110, 115, 48, 120, 54, 48, 48, 46, 106, 112, 103, 34, 115, 112, 97, 105, 110, 98, 101, 97, 99, 104, 116, 97, 120, 101, 115, 109, 105, 99, 114, 111, 97, 110, 103, 101, 108, 45, 45, 62, 60, 47, 103, 105, 102, 116, 115, 115, 116, 101, 118, 101, 45, 108, 105, 110, 107, 98, 111, 100, 121, 46, 125, 41, 59, 10, 9, 109, 111, 117, 110, 116, 32, 40, 49, 57, 57, 70, 65, 81, 60, 47, 114, 111, 103, 101, 114, 102, 114, 97, 110, 107, 67, 108, 97, 115, 115, 50, 56, 112, 120, 59, 102, 101, 101, 100, 115, 60, 104, 49, 62, 60, 115, 99, 111, 116, 116, 116, 101, 115, 116, 115, 50, 50, 112, 120, 59, 100, 114, 105, 110, 107, 41, 32, 124, 124, 32, 108, 101, 119, 105, 115, 115, 104, 97, 108, 108, 35, 48, 51, 57, 59, 32, 102, 111, 114, 32, 108, 111, 118, 101, 100, 119, 97, 115, 116, 101, 48, 48, 112, 120, 59, 106, 97, 58, 227, 130, 115, 105, 109, 111, 110, 60, 102, 111, 110, 116, 114, 101, 112, 108, 121, 109, 101, 101, 116, 115, 117, 110, 116, 101, 114, 99, 104, 101, 97, 112, 116, 105, 103, 104, 116, 66, 114, 97, 110, 100, 41, 32, 33, 61, 32, 100, 114, 101, 115, 115, 99, 108, 105, 112, 115, 114, 111, 111, 109, 115, 111, 110, 107, 101, 121, 109, 111, 98, 105, 108, 109, 97, 105, 110, 46, 78, 97, 109, 101, 32, 112, 108, 97, 116, 101, 102, 117, 110, 110, 121, 116, 114, 101, 101, 115, 99, 111, 109, 47, 34, 49, 46, 106, 112, 103, 119, 109, 111, 100, 101, 112, 97, 114, 97, 109, 83, 84, 65, 82, 84, 108, 101, 102, 116, 32, 105, 100, 100, 101, 110, 44, 32, 50, 48, 49, 41, 59, 10, 125, 10, 102, 111, 114, 109, 46, 118, 105, 114, 117, 115, 99, 104, 97, 105, 114, 116, 114, 97, 110, 115, 119, 111, 114, 115, 116, 80, 97, 103, 101, 115, 105, 116, 105, 111, 110, 112, 97, 116, 99, 104, 60, 33, 45, 45, 10, 111, 45, 99, 97, 99, 102, 105, 114, 109, 115, 116, 111, 117, 114, 115, 44, 48, 48, 48, 32, 97, 115, 105, 97, 110, 105, 43, 43, 41, 123, 97, 100, 111, 98, 101, 39, 41, 91, 48, 93, 
105, 100, 61, 49, 48, 98, 111, 116, 104, 59, 109, 101, 110, 117, 32, 46, 50, 46, 109, 105, 46, 112, 110, 103, 34, 107, 101, 118, 105, 110, 99, 111, 97, 99, 104, 67, 104, 105, 108, 100, 98, 114, 117, 99, 101, 50, 46, 106, 112, 103, 85, 82, 76, 41, 43, 46, 106, 112, 103, 124, 115, 117, 105, 116, 101, 115, 108, 105, 99, 101, 104, 97, 114, 114, 121, 49, 50, 48, 34, 32, 115, 119, 101, 101, 116, 116, 114, 62, 13, 10, 110, 97, 109, 101, 61, 100, 105, 101, 103, 111, 112, 97, 103, 101, 32, 115, 119, 105, 115, 115, 45, 45, 62, 10, 10, 35, 102, 102, 102, 59, 34, 62, 76, 111, 103, 46, 99, 111, 109, 34, 116, 114, 101, 97, 116, 115, 104, 101, 101, 116, 41, 32, 38, 38, 32, 49, 52, 112, 120, 59, 115, 108, 101, 101, 112, 110, 116, 101, 110, 116, 102, 105, 108, 101, 100, 106, 97, 58, 227, 131, 105, 100, 61, 34, 99, 78, 97, 109, 101, 34, 119, 111, 114, 115, 101, 115, 104, 111, 116, 115, 45, 98, 111, 120, 45, 100, 101, 108, 116, 97, 10, 38, 108, 116, 59, 98, 101, 97, 114, 115, 58, 52, 56, 90, 60, 100, 97, 116, 97, 45, 114, 117, 114, 97, 108, 60, 47, 97, 62, 32, 115, 112, 101, 110, 100, 98, 97, 107, 101, 114, 115, 104, 111, 112, 115, 61, 32, 34, 34, 59, 112, 104, 112, 34, 62, 99, 116, 105, 111, 110, 49, 51, 112, 120, 59, 98, 114, 105, 97, 110, 104, 101, 108, 108, 111, 115, 105, 122, 101, 61, 111, 61, 37, 50, 70, 32, 106, 111, 105, 110, 109, 97, 121, 98, 101, 60, 105, 109, 103, 32, 105, 109, 103, 34, 62, 44, 32, 102, 106, 115, 105, 109, 103, 34, 32, 34, 41, 91, 48, 93, 77, 84, 111, 112, 66, 84, 121, 112, 101, 34, 110, 101, 119, 108, 121, 68, 97, 110, 115, 107, 99, 122, 101, 99, 104, 116, 114, 97, 105, 108, 107, 110, 111, 119, 115, 60, 47, 104, 53, 62, 102, 97, 113, 34, 62, 122, 104, 45, 99, 110, 49, 48, 41, 59, 10, 45, 49, 34, 41, 59, 116, 121, 112, 101, 61, 98, 108, 117, 101, 115, 116, 114, 117, 108, 121, 100, 97, 118, 105, 115, 46, 106, 115, 39, 59, 62, 13, 10, 60, 33, 115, 116, 101, 101, 108, 32, 121, 111, 117, 32, 104, 50, 62, 13, 10, 102, 111, 114, 109, 32, 106, 101, 115, 117, 115, 49, 48, 48, 37, 32, 109, 101, 110, 117, 46, 13, 10, 9, 13, 10, 119, 97, 108, 101, 115, 114, 105, 115, 107, 115, 117, 109, 101, 110, 116, 100, 100, 105, 110, 103, 98, 45, 108, 105, 107, 116, 101, 97, 99, 104, 103, 105, 102, 34, 32, 118, 101, 103, 97, 115, 100, 97, 110, 115, 107, 101, 101, 115, 116, 105, 115, 104, 113, 105, 112, 115, 117, 111, 109, 105, 115, 111, 98, 114, 101, 100, 101, 115, 100, 101, 101, 110, 116, 114, 101, 116, 111, 100, 111, 115, 112, 117, 101, 100, 101, 97, 195, 177, 111, 115, 101, 115, 116, 195, 161, 116, 105, 101, 110, 101, 104, 97, 115, 116, 97, 111, 116, 114, 111, 115, 112, 97, 114, 116, 101, 100, 111, 110, 100, 101, 110, 117, 101, 118, 111, 104, 97, 99, 101, 114, 102, 111, 114, 109, 97, 109, 105, 115, 109, 111, 109, 101, 106, 111, 114, 109, 117, 110, 100, 111, 97, 113, 117, 195, 173, 100, 195, 173, 97, 115, 115, 195, 179, 108, 111, 97, 121, 117, 100, 97, 102, 101, 99, 104, 97, 116, 111, 100, 97, 115, 116, 97, 110, 116, 111, 109, 101, 110, 111, 115, 100, 97, 116, 111, 115, 111, 116, 114, 97, 115, 115, 105, 116, 105, 111, 109, 117, 99, 104, 111, 97, 104, 111, 114, 97, 108, 117, 103, 97, 114, 109, 97, 121, 111, 114, 101, 115, 116, 111, 115, 104, 111, 114, 97, 115, 116, 101, 110, 101, 114, 97, 110, 116, 101, 115, 102, 111, 116, 111, 115, 101, 115, 116, 97, 115, 112, 97, 195, 173, 115, 110, 117, 101, 118, 97, 115, 97, 108, 117, 100, 102, 111, 114, 111, 115, 109, 101, 100, 105, 111, 113, 117, 105, 101, 110, 109, 101, 115, 101, 115, 112, 111, 100, 101, 114, 99, 104, 105, 108, 101, 115, 101, 114, 195, 161, 118, 
101, 99, 101, 115, 100, 101, 99, 105, 114, 106, 111, 115, 195, 169, 101, 115, 116, 97, 114, 118, 101, 110, 116, 97, 103, 114, 117, 112, 111, 104, 101, 99, 104, 111, 101, 108, 108, 111, 115, 116, 101, 110, 103, 111, 97, 109, 105, 103, 111, 99, 111, 115, 97, 115, 110, 105, 118, 101, 108, 103, 101, 110, 116, 101, 109, 105, 115, 109, 97, 97, 105, 114, 101, 115, 106, 117, 108, 105, 111, 116, 101, 109, 97, 115, 104, 97, 99, 105, 97, 102, 97, 118, 111, 114, 106, 117, 110, 105, 111, 108, 105, 98, 114, 101, 112, 117, 110, 116, 111, 98, 117, 101, 110, 111, 97, 117, 116, 111, 114, 97, 98, 114, 105, 108, 98, 117, 101, 110, 97, 116, 101, 120, 116, 111, 109, 97, 114, 122, 111, 115, 97, 98, 101, 114, 108, 105, 115, 116, 97, 108, 117, 101, 103, 111, 99, 195, 179, 109, 111, 101, 110, 101, 114, 111, 106, 117, 101, 103, 111, 112, 101, 114, 195, 186, 104, 97, 98, 101, 114, 101, 115, 116, 111, 121, 110, 117, 110, 99, 97, 109, 117, 106, 101, 114, 118, 97, 108, 111, 114, 102, 117, 101, 114, 97, 108, 105, 98, 114, 111, 103, 117, 115, 116, 97, 105, 103, 117, 97, 108, 118, 111, 116, 111, 115, 99, 97, 115, 111, 115, 103, 117, 195, 173, 97, 112, 117, 101, 100, 111, 115, 111, 109, 111, 115, 97, 118, 105, 115, 111, 117, 115, 116, 101, 100, 100, 101, 98, 101, 110, 110, 111, 99, 104, 101, 98, 117, 115, 99, 97, 102, 97, 108, 116, 97, 101, 117, 114, 111, 115, 115, 101, 114, 105, 101, 100, 105, 99, 104, 111, 99, 117, 114, 115, 111, 99, 108, 97, 118, 101, 99, 97, 115, 97, 115, 108, 101, 195, 179, 110, 112, 108, 97, 122, 111, 108, 97, 114, 103, 111, 111, 98, 114, 97, 115, 118, 105, 115, 116, 97, 97, 112, 111, 121, 111, 106, 117, 110, 116, 111, 116, 114, 97, 116, 97, 118, 105, 115, 116, 111, 99, 114, 101, 97, 114, 99, 97, 109, 112, 111, 104, 101, 109, 111, 115, 99, 105, 110, 99, 111, 99, 97, 114, 103, 111, 112, 105, 115, 111, 115, 111, 114, 100, 101, 110, 104, 97, 99, 101, 110, 195, 161, 114, 101, 97, 100, 105, 115, 99, 111, 112, 101, 100, 114, 111, 99, 101, 114, 99, 97, 112, 117, 101, 100, 97, 112, 97, 112, 101, 108, 109, 101, 110, 111, 114, 195, 186, 116, 105, 108, 99, 108, 97, 114, 111, 106, 111, 114, 103, 101, 99, 97, 108, 108, 101, 112, 111, 110, 101, 114, 116, 97, 114, 100, 101, 110, 97, 100, 105, 101, 109, 97, 114, 99, 97, 115, 105, 103, 117, 101, 101, 108, 108, 97, 115, 115, 105, 103, 108, 111, 99, 111, 99, 104, 101, 109, 111, 116, 111, 115, 109, 97, 100, 114, 101, 99, 108, 97, 115, 101, 114, 101, 115, 116, 111, 110, 105, 195, 177, 111, 113, 117, 101, 100, 97, 112, 97, 115, 97, 114, 98, 97, 110, 99, 111, 104, 105, 106, 111, 115, 118, 105, 97, 106, 101, 112, 97, 98, 108, 111, 195, 169, 115, 116, 101, 118, 105, 101, 110, 101, 114, 101, 105, 110, 111, 100, 101, 106, 97, 114, 102, 111, 110, 100, 111, 99, 97, 110, 97, 108, 110, 111, 114, 116, 101, 108, 101, 116, 114, 97, 99, 97, 117, 115, 97, 116, 111, 109, 97, 114, 109, 97, 110, 111, 115, 108, 117, 110, 101, 115, 97, 117, 116, 111, 115, 118, 105, 108, 108, 97, 118, 101, 110, 100, 111, 112, 101, 115, 97, 114, 116, 105, 112, 111, 115, 116, 101, 110, 103, 97, 109, 97, 114, 99, 111, 108, 108, 101, 118, 97, 112, 97, 100, 114, 101, 117, 110, 105, 100, 111, 118, 97, 109, 111, 115, 122, 111, 110, 97, 115, 97, 109, 98, 111, 115, 98, 97, 110, 100, 97, 109, 97, 114, 105, 97, 97, 98, 117, 115, 111, 109, 117, 99, 104, 97, 115, 117, 98, 105, 114, 114, 105, 111, 106, 97, 118, 105, 118, 105, 114, 103, 114, 97, 100, 111, 99, 104, 105, 99, 97, 97, 108, 108, 195, 173, 106, 111, 118, 101, 110, 100, 105, 99, 104, 97, 101, 115, 116, 97, 110, 116, 97, 108, 101, 115, 115, 97, 108, 105, 114, 115, 
117, 101, 108, 111, 112, 101, 115, 111, 115, 102, 105, 110, 101, 115, 108, 108, 97, 109, 97, 98, 117, 115, 99, 111, 195, 169, 115, 116, 97, 108, 108, 101, 103, 97, 110, 101, 103, 114, 111, 112, 108, 97, 122, 97, 104, 117, 109, 111, 114, 112, 97, 103, 97, 114, 106, 117, 110, 116, 97, 100, 111, 98, 108, 101, 105, 115, 108, 97, 115, 98, 111, 108, 115, 97, 98, 97, 195, 177, 111, 104, 97, 98, 108, 97, 108, 117, 99, 104, 97, 195, 129, 114, 101, 97, 100, 105, 99, 101, 110, 106, 117, 103, 97, 114, 110, 111, 116, 97, 115, 118, 97, 108, 108, 101, 97, 108, 108, 195, 161, 99, 97, 114, 103, 97, 100, 111, 108, 111, 114, 97, 98, 97, 106, 111, 101, 115, 116, 195, 169, 103, 117, 115, 116, 111, 109, 101, 110, 116, 101, 109, 97, 114, 105, 111, 102, 105, 114, 109, 97, 99, 111, 115, 116, 111, 102, 105, 99, 104, 97, 112, 108, 97, 116, 97, 104, 111, 103, 97, 114, 97, 114, 116, 101, 115, 108, 101, 121, 101, 115, 97, 113, 117, 101, 108, 109, 117, 115, 101, 111, 98, 97, 115, 101, 115, 112, 111, 99, 111, 115, 109, 105, 116, 97, 100, 99, 105, 101, 108, 111, 99, 104, 105, 99, 111, 109, 105, 101, 100, 111, 103, 97, 110, 97, 114, 115, 97, 110, 116, 111, 101, 116, 97, 112, 97, 100, 101, 98, 101, 115, 112, 108, 97, 121, 97, 114, 101, 100, 101, 115, 115, 105, 101, 116, 101, 99, 111, 114, 116, 101, 99, 111, 114, 101, 97, 100, 117, 100, 97, 115, 100, 101, 115, 101, 111, 118, 105, 101, 106, 111, 100, 101, 115, 101, 97, 97, 103, 117, 97, 115, 38, 113, 117, 111, 116, 59, 100, 111, 109, 97, 105, 110, 99, 111, 109, 109, 111, 110, 115, 116, 97, 116, 117, 115, 101, 118, 101, 110, 116, 115, 109, 97, 115, 116, 101, 114, 115, 121, 115, 116, 101, 109, 97, 99, 116, 105, 111, 110, 98, 97, 110, 110, 101, 114, 114, 101, 109, 111, 118, 101, 115, 99, 114, 111, 108, 108, 117, 112, 100, 97, 116, 101, 103, 108, 111, 98, 97, 108, 109, 101, 100, 105, 117, 109, 102, 105, 108, 116, 101, 114, 110, 117, 109, 98, 101, 114, 99, 104, 97, 110, 103, 101, 114, 101, 115, 117, 108, 116, 112, 117, 98, 108, 105, 99, 115, 99, 114, 101, 101, 110, 99, 104, 111, 111, 115, 101, 110, 111, 114, 109, 97, 108, 116, 114, 97, 118, 101, 108, 105, 115, 115, 117, 101, 115, 115, 111, 117, 114, 99, 101, 116, 97, 114, 103, 101, 116, 115, 112, 114, 105, 110, 103, 109, 111, 100, 117, 108, 101, 109, 111, 98, 105, 108, 101, 115, 119, 105, 116, 99, 104, 112, 104, 111, 116, 111, 115, 98, 111, 114, 100, 101, 114, 114, 101, 103, 105, 111, 110, 105, 116, 115, 101, 108, 102, 115, 111, 99, 105, 97, 108, 97, 99, 116, 105, 118, 101, 99, 111, 108, 117, 109, 110, 114, 101, 99, 111, 114, 100, 102, 111, 108, 108, 111, 119, 116, 105, 116, 108, 101, 62, 101, 105, 116, 104, 101, 114, 108, 101, 110, 103, 116, 104, 102, 97, 109, 105, 108, 121, 102, 114, 105, 101, 110, 100, 108, 97, 121, 111, 117, 116, 97, 117, 116, 104, 111, 114, 99, 114, 101, 97, 116, 101, 114, 101, 118, 105, 101, 119, 115, 117, 109, 109, 101, 114, 115, 101, 114, 118, 101, 114, 112, 108, 97, 121, 101, 100, 112, 108, 97, 121, 101, 114, 101, 120, 112, 97, 110, 100, 112, 111, 108, 105, 99, 121, 102, 111, 114, 109, 97, 116, 100, 111, 117, 98, 108, 101, 112, 111, 105, 110, 116, 115, 115, 101, 114, 105, 101, 115, 112, 101, 114, 115, 111, 110, 108, 105, 118, 105, 110, 103, 100, 101, 115, 105, 103, 110, 109, 111, 110, 116, 104, 115, 102, 111, 114, 99, 101, 115, 117, 110, 105, 113, 117, 101, 119, 101, 105, 103, 104, 116, 112, 101, 111, 112, 108, 101, 101, 110, 101, 114, 103, 121, 110, 97, 116, 117, 114, 101, 115, 101, 97, 114, 99, 104, 102, 105, 103, 117, 114, 101, 104, 97, 118, 105, 110, 103, 99, 117, 115, 116, 111, 109, 111, 102, 102, 
115, 101, 116, 108, 101, 116, 116, 101, 114, 119, 105, 110, 100, 111, 119, 115, 117, 98, 109, 105, 116, 114, 101, 110, 100, 101, 114, 103, 114, 111, 117, 112, 115, 117, 112, 108, 111, 97, 100, 104, 101, 97, 108, 116, 104, 109, 101, 116, 104, 111, 100, 118, 105, 100, 101, 111, 115, 115, 99, 104, 111, 111, 108, 102, 117, 116, 117, 114, 101, 115, 104, 97, 100, 111, 119, 100, 101, 98, 97, 116, 101, 118, 97, 108, 117, 101, 115, 79, 98, 106, 101, 99, 116, 111, 116, 104, 101, 114, 115, 114, 105, 103, 104, 116, 115, 108, 101, 97, 103, 117, 101, 99, 104, 114, 111, 109, 101, 115, 105, 109, 112, 108, 101, 110, 111, 116, 105, 99, 101, 115, 104, 97, 114, 101, 100, 101, 110, 100, 105, 110, 103, 115, 101, 97, 115, 111, 110, 114, 101, 112, 111, 114, 116, 111, 110, 108, 105, 110, 101, 115, 113, 117, 97, 114, 101, 98, 117, 116, 116, 111, 110, 105, 109, 97, 103, 101, 115, 101, 110, 97, 98, 108, 101, 109, 111, 118, 105, 110, 103, 108, 97, 116, 101, 115, 116, 119, 105, 110, 116, 101, 114, 70, 114, 97, 110, 99, 101, 112, 101, 114, 105, 111, 100, 115, 116, 114, 111, 110, 103, 114, 101, 112, 101, 97, 116, 76, 111, 110, 100, 111, 110, 100, 101, 116, 97, 105, 108, 102, 111, 114, 109, 101, 100, 100, 101, 109, 97, 110, 100, 115, 101, 99, 117, 114, 101, 112, 97, 115, 115, 101, 100, 116, 111, 103, 103, 108, 101, 112, 108, 97, 99, 101, 115, 100, 101, 118, 105, 99, 101, 115, 116, 97, 116, 105, 99, 99, 105, 116, 105, 101, 115, 115, 116, 114, 101, 97, 109, 121, 101, 108, 108, 111, 119, 97, 116, 116, 97, 99, 107, 115, 116, 114, 101, 101, 116, 102, 108, 105, 103, 104, 116, 104, 105, 100, 100, 101, 110, 105, 110, 102, 111, 34, 62, 111, 112, 101, 110, 101, 100, 117, 115, 101, 102, 117, 108, 118, 97, 108, 108, 101, 121, 99, 97, 117, 115, 101, 115, 108, 101, 97, 100, 101, 114, 115, 101, 99, 114, 101, 116, 115, 101, 99, 111, 110, 100, 100, 97, 109, 97, 103, 101, 115, 112, 111, 114, 116, 115, 101, 120, 99, 101, 112, 116, 114, 97, 116, 105, 110, 103, 115, 105, 103, 110, 101, 100, 116, 104, 105, 110, 103, 115, 101, 102, 102, 101, 99, 116, 102, 105, 101, 108, 100, 115, 115, 116, 97, 116, 101, 115, 111, 102, 102, 105, 99, 101, 118, 105, 115, 117, 97, 108, 101, 100, 105, 116, 111, 114, 118, 111, 108, 117, 109, 101, 82, 101, 112, 111, 114, 116, 109, 117, 115, 101, 117, 109, 109, 111, 118, 105, 101, 115, 112, 97, 114, 101, 110, 116, 97, 99, 99, 101, 115, 115, 109, 111, 115, 116, 108, 121, 109, 111, 116, 104, 101, 114, 34, 32, 105, 100, 61, 34, 109, 97, 114, 107, 101, 116, 103, 114, 111, 117, 110, 100, 99, 104, 97, 110, 99, 101, 115, 117, 114, 118, 101, 121, 98, 101, 102, 111, 114, 101, 115, 121, 109, 98, 111, 108, 109, 111, 109, 101, 110, 116, 115, 112, 101, 101, 99, 104, 109, 111, 116, 105, 111, 110, 105, 110, 115, 105, 100, 101, 109, 97, 116, 116, 101, 114, 67, 101, 110, 116, 101, 114, 111, 98, 106, 101, 99, 116, 101, 120, 105, 115, 116, 115, 109, 105, 100, 100, 108, 101, 69, 117, 114, 111, 112, 101, 103, 114, 111, 119, 116, 104, 108, 101, 103, 97, 99, 121, 109, 97, 110, 110, 101, 114, 101, 110, 111, 117, 103, 104, 99, 97, 114, 101, 101, 114, 97, 110, 115, 119, 101, 114, 111, 114, 105, 103, 105, 110, 112, 111, 114, 116, 97, 108, 99, 108, 105, 101, 110, 116, 115, 101, 108, 101, 99, 116, 114, 97, 110, 100, 111, 109, 99, 108, 111, 115, 101, 100, 116, 111, 112, 105, 99, 115, 99, 111, 109, 105, 110, 103, 102, 97, 116, 104, 101, 114, 111, 112, 116, 105, 111, 110, 115, 105, 109, 112, 108, 121, 114, 97, 105, 115, 101, 100, 101, 115, 99, 97, 112, 101, 99, 104, 111, 115, 101, 110, 99, 104, 117, 114, 99, 104, 100, 101, 102, 105, 110, 101, 114, 
101, 97, 115, 111, 110, 99, 111, 114, 110, 101, 114, 111, 117, 116, 112, 117, 116, 109, 101, 109, 111, 114, 121, 105, 102, 114, 97, 109, 101, 112, 111, 108, 105, 99, 101, 109, 111, 100, 101, 108, 115, 78, 117, 109, 98, 101, 114, 100, 117, 114, 105, 110, 103, 111, 102, 102, 101, 114, 115, 115, 116, 121, 108, 101, 115, 107, 105, 108, 108, 101, 100, 108, 105, 115, 116, 101, 100, 99, 97, 108, 108, 101, 100, 115, 105, 108, 118, 101, 114, 109, 97, 114, 103, 105, 110, 100, 101, 108, 101, 116, 101, 98, 101, 116, 116, 101, 114, 98, 114, 111, 119, 115, 101, 108, 105, 109, 105, 116, 115, 71, 108, 111, 98, 97, 108, 115, 105, 110, 103, 108, 101, 119, 105, 100, 103, 101, 116, 99, 101, 110, 116, 101, 114, 98, 117, 100, 103, 101, 116, 110, 111, 119, 114, 97, 112, 99, 114, 101, 100, 105, 116, 99, 108, 97, 105, 109, 115, 101, 110, 103, 105, 110, 101, 115, 97, 102, 101, 116, 121, 99, 104, 111, 105, 99, 101, 115, 112, 105, 114, 105, 116, 45, 115, 116, 121, 108, 101, 115, 112, 114, 101, 97, 100, 109, 97, 107, 105, 110, 103, 110, 101, 101, 100, 101, 100, 114, 117, 115, 115, 105, 97, 112, 108, 101, 97, 115, 101, 101, 120, 116, 101, 110, 116, 83, 99, 114, 105, 112, 116, 98, 114, 111, 107, 101, 110, 97, 108, 108, 111, 119, 115, 99, 104, 97, 114, 103, 101, 100, 105, 118, 105, 100, 101, 102, 97, 99, 116, 111, 114, 109, 101, 109, 98, 101, 114, 45, 98, 97, 115, 101, 100, 116, 104, 101, 111, 114, 121, 99, 111, 110, 102, 105, 103, 97, 114, 111, 117, 110, 100, 119, 111, 114, 107, 101, 100, 104, 101, 108, 112, 101, 100, 67, 104, 117, 114, 99, 104, 105, 109, 112, 97, 99, 116, 115, 104, 111, 117, 108, 100, 97, 108, 119, 97, 121, 115, 108, 111, 103, 111, 34, 32, 98, 111, 116, 116, 111, 109, 108, 105, 115, 116, 34, 62, 41, 123, 118, 97, 114, 32, 112, 114, 101, 102, 105, 120, 111, 114, 97, 110, 103, 101, 72, 101, 97, 100, 101, 114, 46, 112, 117, 115, 104, 40, 99, 111, 117, 112, 108, 101, 103, 97, 114, 100, 101, 110, 98, 114, 105, 100, 103, 101, 108, 97, 117, 110, 99, 104, 82, 101, 118, 105, 101, 119, 116, 97, 107, 105, 110, 103, 118, 105, 115, 105, 111, 110, 108, 105, 116, 116, 108, 101, 100, 97, 116, 105, 110, 103, 66, 117, 116, 116, 111, 110, 98, 101, 97, 117, 116, 121, 116, 104, 101, 109, 101, 115, 102, 111, 114, 103, 111, 116, 83, 101, 97, 114, 99, 104, 97, 110, 99, 104, 111, 114, 97, 108, 109, 111, 115, 116, 108, 111, 97, 100, 101, 100, 67, 104, 97, 110, 103, 101, 114, 101, 116, 117, 114, 110, 115, 116, 114, 105, 110, 103, 114, 101, 108, 111, 97, 100, 77, 111, 98, 105, 108, 101, 105, 110, 99, 111, 109, 101, 115, 117, 112, 112, 108, 121, 83, 111, 117, 114, 99, 101, 111, 114, 100, 101, 114, 115, 118, 105, 101, 119, 101, 100, 38, 110, 98, 115, 112, 59, 99, 111, 117, 114, 115, 101, 65, 98, 111, 117, 116, 32, 105, 115, 108, 97, 110, 100, 60, 104, 116, 109, 108, 32, 99, 111, 111, 107, 105, 101, 110, 97, 109, 101, 61, 34, 97, 109, 97, 122, 111, 110, 109, 111, 100, 101, 114, 110, 97, 100, 118, 105, 99, 101, 105, 110, 60, 47, 97, 62, 58, 32, 84, 104, 101, 32, 100, 105, 97, 108, 111, 103, 104, 111, 117, 115, 101, 115, 66, 69, 71, 73, 78, 32, 77, 101, 120, 105, 99, 111, 115, 116, 97, 114, 116, 115, 99, 101, 110, 116, 114, 101, 104, 101, 105, 103, 104, 116, 97, 100, 100, 105, 110, 103, 73, 115, 108, 97, 110, 100, 97, 115, 115, 101, 116, 115, 69, 109, 112, 105, 114, 101, 83, 99, 104, 111, 111, 108, 101, 102, 102, 111, 114, 116, 100, 105, 114, 101, 99, 116, 110, 101, 97, 114, 108, 121, 109, 97, 110, 117, 97, 108, 83, 101, 108, 101, 99, 116, 46, 10, 10, 79, 110, 101, 106, 111, 105, 110, 101, 100, 109, 101, 110, 117, 34, 62, 80, 104, 
105, 108, 105, 112, 97, 119, 97, 114, 100, 115, 104, 97, 110, 100, 108, 101, 105, 109, 112, 111, 114, 116, 79, 102, 102, 105, 99, 101, 114, 101, 103, 97, 114, 100, 115, 107, 105, 108, 108, 115, 110, 97, 116, 105, 111, 110, 83, 112, 111, 114, 116, 115, 100, 101, 103, 114, 101, 101, 119, 101, 101, 107, 108, 121, 32, 40, 101, 46, 103, 46, 98, 101, 104, 105, 110, 100, 100, 111, 99, 116, 111, 114, 108, 111, 103, 103, 101, 100, 117, 110, 105, 116, 101, 100, 60, 47, 98, 62, 60, 47, 98, 101, 103, 105, 110, 115, 112, 108, 97, 110, 116, 115, 97, 115, 115, 105, 115, 116, 97, 114, 116, 105, 115, 116, 105, 115, 115, 117, 101, 100, 51, 48, 48, 112, 120, 124, 99, 97, 110, 97, 100, 97, 97, 103, 101, 110, 99, 121, 115, 99, 104, 101, 109, 101, 114, 101, 109, 97, 105, 110, 66, 114, 97, 122, 105, 108, 115, 97, 109, 112, 108, 101, 108, 111, 103, 111, 34, 62, 98, 101, 121, 111, 110, 100, 45, 115, 99, 97, 108, 101, 97, 99, 99, 101, 112, 116, 115, 101, 114, 118, 101, 100, 109, 97, 114, 105, 110, 101, 70, 111, 111, 116, 101, 114, 99, 97, 109, 101, 114, 97, 60, 47, 104, 49, 62, 10, 95, 102, 111, 114, 109, 34, 108, 101, 97, 118, 101, 115, 115, 116, 114, 101, 115, 115, 34, 32, 47, 62, 13, 10, 46, 103, 105, 102, 34, 32, 111, 110, 108, 111, 97, 100, 108, 111, 97, 100, 101, 114, 79, 120, 102, 111, 114, 100, 115, 105, 115, 116, 101, 114, 115, 117, 114, 118, 105, 118, 108, 105, 115, 116, 101, 110, 102, 101, 109, 97, 108, 101, 68, 101, 115, 105, 103, 110, 115, 105, 122, 101, 61, 34, 97, 112, 112, 101, 97, 108, 116, 101, 120, 116, 34, 62, 108, 101, 118, 101, 108, 115, 116, 104, 97, 110, 107, 115, 104, 105, 103, 104, 101, 114, 102, 111, 114, 99, 101, 100, 97, 110, 105, 109, 97, 108, 97, 110, 121, 111, 110, 101, 65, 102, 114, 105, 99, 97, 97, 103, 114, 101, 101, 100, 114, 101, 99, 101, 110, 116, 80, 101, 111, 112, 108, 101, 60, 98, 114, 32, 47, 62, 119, 111, 110, 100, 101, 114, 112, 114, 105, 99, 101, 115, 116, 117, 114, 110, 101, 100, 124, 124, 32, 123, 125, 59, 109, 97, 105, 110, 34, 62, 105, 110, 108, 105, 110, 101, 115, 117, 110, 100, 97, 121, 119, 114, 97, 112, 34, 62, 102, 97, 105, 108, 101, 100, 99, 101, 110, 115, 117, 115, 109, 105, 110, 117, 116, 101, 98, 101, 97, 99, 111, 110, 113, 117, 111, 116, 101, 115, 49, 53, 48, 112, 120, 124, 101, 115, 116, 97, 116, 101, 114, 101, 109, 111, 116, 101, 101, 109, 97, 105, 108, 34, 108, 105, 110, 107, 101, 100, 114, 105, 103, 104, 116, 59, 115, 105, 103, 110, 97, 108, 102, 111, 114, 109, 97, 108, 49, 46, 104, 116, 109, 108, 115, 105, 103, 110, 117, 112, 112, 114, 105, 110, 99, 101, 102, 108, 111, 97, 116, 58, 46, 112, 110, 103, 34, 32, 102, 111, 114, 117, 109, 46, 65, 99, 99, 101, 115, 115, 112, 97, 112, 101, 114, 115, 115, 111, 117, 110, 100, 115, 101, 120, 116, 101, 110, 100, 72, 101, 105, 103, 104, 116, 115, 108, 105, 100, 101, 114, 85, 84, 70, 45, 56, 34, 38, 97, 109, 112, 59, 32, 66, 101, 102, 111, 114, 101, 46, 32, 87, 105, 116, 104, 115, 116, 117, 100, 105, 111, 111, 119, 110, 101, 114, 115, 109, 97, 110, 97, 103, 101, 112, 114, 111, 102, 105, 116, 106, 81, 117, 101, 114, 121, 97, 110, 110, 117, 97, 108, 112, 97, 114, 97, 109, 115, 98, 111, 117, 103, 104, 116, 102, 97, 109, 111, 117, 115, 103, 111, 111, 103, 108, 101, 108, 111, 110, 103, 101, 114, 105, 43, 43, 41, 32, 123, 105, 115, 114, 97, 101, 108, 115, 97, 121, 105, 110, 103, 100, 101, 99, 105, 100, 101, 104, 111, 109, 101, 34, 62, 104, 101, 97, 100, 101, 114, 101, 110, 115, 117, 114, 101, 98, 114, 97, 110, 99, 104, 112, 105, 101, 99, 101, 115, 98, 108, 111, 99, 107, 59, 115, 116, 97, 116, 101, 100, 116, 111, 112, 34, 
62, 60, 114, 97, 99, 105, 110, 103, 114, 101, 115, 105, 122, 101, 45, 45, 38, 103, 116, 59, 112, 97, 99, 105, 116, 121, 115, 101, 120, 117, 97, 108, 98, 117, 114, 101, 97, 117, 46, 106, 112, 103, 34, 32, 49, 48, 44, 48, 48, 48, 111, 98, 116, 97, 105, 110, 116, 105, 116, 108, 101, 115, 97, 109, 111, 117, 110, 116, 44, 32, 73, 110, 99, 46, 99, 111, 109, 101, 100, 121, 109, 101, 110, 117, 34, 32, 108, 121, 114, 105, 99, 115, 116, 111, 100, 97, 121, 46, 105, 110, 100, 101, 101, 100, 99, 111, 117, 110, 116, 121, 95, 108, 111, 103, 111, 46, 70, 97, 109, 105, 108, 121, 108, 111, 111, 107, 101, 100, 77, 97, 114, 107, 101, 116, 108, 115, 101, 32, 105, 102, 80, 108, 97, 121, 101, 114, 116, 117, 114, 107, 101, 121, 41, 59, 118, 97, 114, 32, 102, 111, 114, 101, 115, 116, 103, 105, 118, 105, 110, 103, 101, 114, 114, 111, 114, 115, 68, 111, 109, 97, 105, 110, 125, 101, 108, 115, 101, 123, 105, 110, 115, 101, 114, 116, 66, 108, 111, 103, 60, 47, 102, 111, 111, 116, 101, 114, 108, 111, 103, 105, 110, 46, 102, 97, 115, 116, 101, 114, 97, 103, 101, 110, 116, 115, 60, 98, 111, 100, 121, 32, 49, 48, 112, 120, 32, 48, 112, 114, 97, 103, 109, 97, 102, 114, 105, 100, 97, 121, 106, 117, 110, 105, 111, 114, 100, 111, 108, 108, 97, 114, 112, 108, 97, 99, 101, 100, 99, 111, 118, 101, 114, 115, 112, 108, 117, 103, 105, 110, 53, 44, 48, 48, 48, 32, 112, 97, 103, 101, 34, 62, 98, 111, 115, 116, 111, 110, 46, 116, 101, 115, 116, 40, 97, 118, 97, 116, 97, 114, 116, 101, 115, 116, 101, 100, 95, 99, 111, 117, 110, 116, 102, 111, 114, 117, 109, 115, 115, 99, 104, 101, 109, 97, 105, 110, 100, 101, 120, 44, 102, 105, 108, 108, 101, 100, 115, 104, 97, 114, 101, 115, 114, 101, 97, 100, 101, 114, 97, 108, 101, 114, 116, 40, 97, 112, 112, 101, 97, 114, 83, 117, 98, 109, 105, 116, 108, 105, 110, 101, 34, 62, 98, 111, 100, 121, 34, 62, 10, 42, 32, 84, 104, 101, 84, 104, 111, 117, 103, 104, 115, 101, 101, 105, 110, 103, 106, 101, 114, 115, 101, 121, 78, 101, 119, 115, 60, 47, 118, 101, 114, 105, 102, 121, 101, 120, 112, 101, 114, 116, 105, 110, 106, 117, 114, 121, 119, 105, 100, 116, 104, 61, 67, 111, 111, 107, 105, 101, 83, 84, 65, 82, 84, 32, 97, 99, 114, 111, 115, 115, 95, 105, 109, 97, 103, 101, 116, 104, 114, 101, 97, 100, 110, 97, 116, 105, 118, 101, 112, 111, 99, 107, 101, 116, 98, 111, 120, 34, 62, 10, 83, 121, 115, 116, 101, 109, 32, 68, 97, 118, 105, 100, 99, 97, 110, 99, 101, 114, 116, 97, 98, 108, 101, 115, 112, 114, 111, 118, 101, 100, 65, 112, 114, 105, 108, 32, 114, 101, 97, 108, 108, 121, 100, 114, 105, 118, 101, 114, 105, 116, 101, 109, 34, 62, 109, 111, 114, 101, 34, 62, 98, 111, 97, 114, 100, 115, 99, 111, 108, 111, 114, 115, 99, 97, 109, 112, 117, 115, 102, 105, 114, 115, 116, 32, 124, 124, 32, 91, 93, 59, 109, 101, 100, 105, 97, 46, 103, 117, 105, 116, 97, 114, 102, 105, 110, 105, 115, 104, 119, 105, 100, 116, 104, 58, 115, 104, 111, 119, 101, 100, 79, 116, 104, 101, 114, 32, 46, 112, 104, 112, 34, 32, 97, 115, 115, 117, 109, 101, 108, 97, 121, 101, 114, 115, 119, 105, 108, 115, 111, 110, 115, 116, 111, 114, 101, 115, 114, 101, 108, 105, 101, 102, 115, 119, 101, 100, 101, 110, 67, 117, 115, 116, 111, 109, 101, 97, 115, 105, 108, 121, 32, 121, 111, 117, 114, 32, 83, 116, 114, 105, 110, 103, 10, 10, 87, 104, 105, 108, 116, 97, 121, 108, 111, 114, 99, 108, 101, 97, 114, 58, 114, 101, 115, 111, 114, 116, 102, 114, 101, 110, 99, 104, 116, 104, 111, 117, 103, 104, 34, 41, 32, 43, 32, 34, 60, 98, 111, 100, 121, 62, 98, 117, 121, 105, 110, 103, 98, 114, 97, 110, 100, 115, 77, 101, 109, 98, 101, 114, 110, 97, 109, 101, 
34, 62, 111, 112, 112, 105, 110, 103, 115, 101, 99, 116, 111, 114, 53, 112, 120, 59, 34, 62, 118, 115, 112, 97, 99, 101, 112, 111, 115, 116, 101, 114, 109, 97, 106, 111, 114, 32, 99, 111, 102, 102, 101, 101, 109, 97, 114, 116, 105, 110, 109, 97, 116, 117, 114, 101, 104, 97, 112, 112, 101, 110, 60, 47, 110, 97, 118, 62, 107, 97, 110, 115, 97, 115, 108, 105, 110, 107, 34, 62, 73, 109, 97, 103, 101, 115, 61, 102, 97, 108, 115, 101, 119, 104, 105, 108, 101, 32, 104, 115, 112, 97, 99, 101, 48, 38, 97, 109, 112, 59, 32, 10, 10, 73, 110, 32, 32, 112, 111, 119, 101, 114, 80, 111, 108, 115, 107, 105, 45, 99, 111, 108, 111, 114, 106, 111, 114, 100, 97, 110, 66, 111, 116, 116, 111, 109, 83, 116, 97, 114, 116, 32, 45, 99, 111, 117, 110, 116, 50, 46, 104, 116, 109, 108, 110, 101, 119, 115, 34, 62, 48, 49, 46, 106, 112, 103, 79, 110, 108, 105, 110, 101, 45, 114, 105, 103, 104, 116, 109, 105, 108, 108, 101, 114, 115, 101, 110, 105, 111, 114, 73, 83, 66, 78, 32, 48, 48, 44, 48, 48, 48, 32, 103, 117, 105, 100, 101, 115, 118, 97, 108, 117, 101, 41, 101, 99, 116, 105, 111, 110, 114, 101, 112, 97, 105, 114, 46, 120, 109, 108, 34, 32, 32, 114, 105, 103, 104, 116, 115, 46, 104, 116, 109, 108, 45, 98, 108, 111, 99, 107, 114, 101, 103, 69, 120, 112, 58, 104, 111, 118, 101, 114, 119, 105, 116, 104, 105, 110, 118, 105, 114, 103, 105, 110, 112, 104, 111, 110, 101, 115, 60, 47, 116, 114, 62, 13, 117, 115, 105, 110, 103, 32, 10, 9, 118, 97, 114, 32, 62, 39, 41, 59, 10, 9, 60, 47, 116, 100, 62, 10, 60, 47, 116, 114, 62, 10, 98, 97, 104, 97, 115, 97, 98, 114, 97, 115, 105, 108, 103, 97, 108, 101, 103, 111, 109, 97, 103, 121, 97, 114, 112, 111, 108, 115, 107, 105, 115, 114, 112, 115, 107, 105, 216, 177, 216, 175, 217, 136, 228, 184, 173, 230, 150, 135, 231, 174, 128, 228, 189, 147, 231, 185, 129, 233, 171, 148, 228, 191, 161, 230, 129, 175, 228, 184, 173, 229, 155, 189, 230, 136, 145, 228, 187, 172, 228, 184, 128, 228, 184, 170, 229, 133, 172, 229, 143, 184, 231, 174, 161, 231, 144, 134, 232, 174, 186, 229, 157, 155, 229, 143, 175, 228, 187, 165, 230, 156, 141, 229, 138, 161, 230, 151, 182, 233, 151, 180, 228, 184, 170, 228, 186, 186, 228, 186, 167, 229, 147, 129, 232, 135, 170, 229, 183, 177, 228, 188, 129, 228, 184, 154, 230, 159, 165, 231, 156, 139, 229, 183, 165, 228, 189, 156, 232, 129, 148, 231, 179, 187, 230, 178, 161, 230, 156, 137, 231, 189, 145, 231, 171, 153, 230, 137, 128, 230, 156, 137, 232, 175, 132, 232, 174, 186, 228, 184, 173, 229, 191, 131, 230, 150, 135, 231, 171, 160, 231, 148, 168, 230, 136, 183, 233, 166, 150, 233, 161, 181, 228, 189, 156, 232, 128, 133, 230, 138, 128, 230, 156, 175, 233, 151, 174, 233, 162, 152, 231, 155, 184, 229, 133, 179, 228, 184, 139, 232, 189, 189, 230, 144, 156, 231, 180, 162, 228, 189, 191, 231, 148, 168, 232, 189, 175, 228, 187, 182, 229, 156, 168, 231, 186, 191, 228, 184, 187, 233, 162, 152, 232, 181, 132, 230, 150, 153, 232, 167, 134, 233, 162, 145, 229, 155, 158, 229, 164, 141, 230, 179, 168, 229, 134, 140, 231, 189, 145, 231, 187, 156, 230, 148, 182, 232, 151, 143, 229, 134, 133, 229, 174, 185, 230, 142, 168, 232, 141, 144, 229, 184, 130, 229, 156, 186, 230, 182, 136, 230, 129, 175, 231, 169, 186, 233, 151, 180, 229, 143, 145, 229, 184, 131, 228, 187, 128, 228, 185, 136, 229, 165, 189, 229, 143, 139, 231, 148, 159, 230, 180, 187, 229, 155, 190, 231, 137, 135, 229, 143, 145, 229, 177, 149, 229, 166, 130, 230, 158, 156, 230, 137, 139, 230, 156, 186, 230, 150, 176, 233, 151, 187, 230, 156, 128, 230, 150, 176, 230, 150, 185, 229, 188, 143, 229, 140, 151, 228, 186, 172, 
230, 143, 144, 228, 190, 155, 229, 133, 179, 228, 186, 142, 230, 155, 180, 229, 164, 154, 232, 191, 153, 228, 184, 170, 231, 179, 187, 231, 187, 159, 231, 159, 165, 233, 129, 147, 230, 184, 184, 230, 136, 143, 229, 185, 191, 229, 145, 138, 229, 133, 182, 228, 187, 150, 229, 143, 145, 232, 161, 168, 229, 174, 137, 229, 133, 168, 231, 172, 172, 228, 184, 128, 228, 188, 154, 229, 145, 152, 232, 191, 155, 232, 161, 140, 231, 130, 185, 229, 135, 187, 231, 137, 136, 230, 157, 131, 231, 148, 181, 229, 173, 144, 228, 184, 150, 231, 149, 140, 232, 174, 190, 232, 174, 161, 229, 133, 141, 232, 180, 185, 230, 149, 153, 232, 130, 178, 229, 138, 160, 229, 133, 165, 230, 180, 187, 229, 138, 168, 228, 187, 150, 228, 187, 172, 229, 149, 134, 229, 147, 129, 229, 141, 154, 229, 174, 162, 231, 142, 176, 229, 156, 168, 228, 184, 138, 230, 181, 183, 229, 166, 130, 228, 189, 149, 229, 183, 178, 231, 187, 143, 231, 149, 153, 232, 168, 128, 232, 175, 166, 231, 187, 134, 231, 164, 190, 229, 140, 186, 231, 153, 187, 229, 189, 149, 230, 156, 172, 231, 171, 153, 233, 156, 128, 232, 166, 129, 228, 187, 183, 230, 160, 188, 230, 148, 175, 230, 140, 129, 229, 155, 189, 233, 153, 133, 233, 147, 190, 230, 142, 165, 229, 155, 189, 229, 174, 182, 229, 187, 186, 232, 174, 190, 230, 156, 139, 229, 143, 139, 233, 152, 133, 232, 175, 187, 230, 179, 149, 229, 190, 139, 228, 189, 141, 231, 189, 174, 231, 187, 143, 230, 181, 142, 233, 128, 137, 230, 139, 169, 232, 191, 153, 230, 160, 183, 229, 189, 147, 229, 137, 141, 229, 136, 134, 231, 177, 187, 230, 142, 146, 232, 161, 140, 229, 155, 160, 228, 184, 186, 228, 186, 164, 230, 152, 147, 230, 156, 128, 229, 144, 142, 233, 159, 179, 228, 185, 144, 228, 184, 141, 232, 131, 189, 233, 128, 154, 232, 191, 135, 232, 161, 140, 228, 184, 154, 231, 167, 145, 230, 138, 128, 229, 143, 175, 232, 131, 189, 232, 174, 190, 229, 164, 135, 229, 144, 136, 228, 189, 156, 229, 164, 167, 229, 174, 182, 231, 164, 190, 228, 188, 154, 231, 160, 148, 231, 169, 182, 228, 184, 147, 228, 184, 154, 229, 133, 168, 233, 131, 168, 233, 161, 185, 231, 155, 174, 232, 191, 153, 233, 135, 140, 232, 191, 152, 230, 152, 175, 229, 188, 128, 229, 167, 139, 230, 131, 133, 229, 134, 181, 231, 148, 181, 232, 132, 145, 230, 150, 135, 228, 187, 182, 229, 147, 129, 231, 137, 140, 229, 184, 174, 229, 138, 169, 230, 150, 135, 229, 140, 150, 232, 181, 132, 230, 186, 144, 229, 164, 167, 229, 173, 166, 229, 173, 166, 228, 185, 160, 229, 156, 176, 229, 157, 128, 230, 181, 143, 232, 167, 136, 230, 138, 149, 232, 181, 132, 229, 183, 165, 231, 168, 139, 232, 166, 129, 230, 177, 130, 230, 128, 142, 228, 185, 136, 230, 151, 182, 229, 128, 153, 229, 138, 159, 232, 131, 189, 228, 184, 187, 232, 166, 129, 231, 155, 174, 229, 137, 141, 232, 181, 132, 232, 174, 175, 229, 159, 142, 229, 184, 130, 230, 150, 185, 230, 179, 149, 231, 148, 181, 229, 189, 177, 230, 139, 155, 232, 129, 152, 229, 163, 176, 230, 152, 142, 228, 187, 187, 228, 189, 149, 229, 129, 165, 229, 186, 183, 230, 149, 176, 230, 141, 174, 231, 190, 142, 229, 155, 189, 230, 177, 189, 232, 189, 166, 228, 187, 139, 231, 187, 141, 228, 189, 134, 230, 152, 175, 228, 186, 164, 230, 181, 129, 231, 148, 159, 228, 186, 167, 230, 137, 128, 228, 187, 165, 231, 148, 181, 232, 175, 157, 230, 152, 190, 231, 164, 186, 228, 184, 128, 228, 186, 155, 229, 141, 149, 228, 189, 141, 228, 186, 186, 229, 145, 152, 229, 136, 134, 230, 158, 144, 229, 156, 176, 229, 155, 190, 230, 151, 133, 230, 184, 184, 229, 183, 165, 229, 133, 183, 229, 173, 166, 231, 148, 159, 231, 179, 187, 229, 136, 151, 231, 189, 145, 
229, 143, 139, 229, 184, 150, 229, 173, 144, 229, 175, 134, 231, 160, 129, 233, 162, 145, 233, 129, 147, 230, 142, 167, 229, 136, 182, 229, 156, 176, 229, 140, 186, 229, 159, 186, 230, 156, 172, 229, 133, 168, 229, 155, 189, 231, 189, 145, 228, 184, 138, 233, 135, 141, 232, 166, 129, 231, 172, 172, 228, 186, 140, 229, 150, 156, 230, 172, 162, 232, 191, 155, 229, 133, 165, 229, 143, 139, 230, 131, 133, 232, 191, 153, 228, 186, 155, 232, 128, 131, 232, 175, 149, 229, 143, 145, 231, 142, 176, 229, 159, 185, 232, 174, 173, 228, 187, 165, 228, 184, 138, 230, 148, 191, 229, 186, 156, 230, 136, 144, 228, 184, 186, 231, 142, 175, 229, 162, 131, 233, 166, 153, 230, 184, 175, 229, 144, 140, 230, 151, 182, 229, 168, 177, 228, 185, 144, 229, 143, 145, 233, 128, 129, 228, 184, 128, 229, 174, 154, 229, 188, 128, 229, 143, 145, 228, 189, 156, 229, 147, 129, 230, 160, 135, 229, 135, 134, 230, 172, 162, 232, 191, 142, 232, 167, 163, 229, 134, 179, 229, 156, 176, 230, 150, 185, 228, 184, 128, 228, 184, 139, 228, 187, 165, 229, 143, 138, 232, 180, 163, 228, 187, 187, 230, 136, 150, 232, 128, 133, 229, 174, 162, 230, 136, 183, 228, 187, 163, 232, 161, 168, 231, 167, 175, 229, 136, 134, 229, 165, 179, 228, 186, 186, 230, 149, 176, 231, 160, 129, 233, 148, 128, 229, 148, 174, 229, 135, 186, 231, 142, 176, 231, 166, 187, 231, 186, 191, 229, 186, 148, 231, 148, 168, 229, 136, 151, 232, 161, 168, 228, 184, 141, 229, 144, 140, 231, 188, 150, 232, 190, 145, 231, 187, 159, 232, 174, 161, 230, 159, 165, 232, 175, 162, 228, 184, 141, 232, 166, 129, 230, 156, 137, 229, 133, 179, 230, 156, 186, 230, 158, 132, 229, 190, 136, 229, 164, 154, 230, 146, 173, 230, 148, 190, 231, 187, 132, 231, 187, 135, 230, 148, 191, 231, 173, 150, 231, 155, 180, 230, 142, 165, 232, 131, 189, 229, 138, 155, 230, 157, 165, 230, 186, 144, 230, 153, 130, 233, 150, 147, 231, 156, 139, 229, 136, 176, 231, 131, 173, 233, 151, 168, 229, 133, 179, 233, 148, 174, 228, 184, 147, 229, 140, 186, 233, 157, 158, 229, 184, 184, 232, 139, 177, 232, 175, 173, 231, 153, 190, 229, 186, 166, 229, 184, 140, 230, 156, 155, 231, 190, 142, 229, 165, 179, 230, 175, 148, 232, 190, 131, 231, 159, 165, 232, 175, 134, 232, 167, 132, 229, 174, 154, 229, 187, 186, 232, 174, 174, 233, 131, 168, 233, 151, 168, 230, 132, 143, 232, 167, 129, 231, 178, 190, 229, 189, 169, 230, 151, 165, 230, 156, 172, 230, 143, 144, 233, 171, 152, 229, 143, 145, 232, 168, 128, 230, 150, 185, 233, 157, 162, 229, 159, 186, 233, 135, 145, 229, 164, 132, 231, 144, 134, 230, 157, 131, 233, 153, 144, 229, 189, 177, 231, 137, 135, 233, 147, 182, 232, 161, 140, 232, 191, 152, 230, 156, 137, 229, 136, 134, 228, 186, 171, 231, 137, 169, 229, 147, 129, 231, 187, 143, 232, 144, 165, 230, 183, 187, 229, 138, 160, 228, 184, 147, 229, 174, 182, 232, 191, 153, 231, 167, 141, 232, 175, 157, 233, 162, 152, 232, 181, 183, 230, 157, 165, 228, 184, 154, 229, 138, 161, 229, 133, 172, 229, 145, 138, 232, 174, 176, 229, 189, 149, 231, 174, 128, 228, 187, 139, 232, 180, 168, 233, 135, 143, 231, 148, 183, 228, 186, 186, 229, 189, 177, 229, 147, 141, 229, 188, 149, 231, 148, 168, 230, 138, 165, 229, 145, 138, 233, 131, 168, 229, 136, 134, 229, 191, 171, 233, 128, 159, 229, 146, 168, 232, 175, 162, 230, 151, 182, 229, 176, 154, 230, 179, 168, 230, 132, 143, 231, 148, 179, 232, 175, 183, 229, 173, 166, 230, 160, 161, 229, 186, 148, 232, 175, 165, 229, 142, 134, 229, 143, 178, 229, 143, 170, 230, 152, 175, 232, 191, 148, 229, 155, 158, 232, 180, 173, 228, 185, 176, 229, 144, 141, 231, 167, 176, 228, 184, 186, 228, 186, 134, 
/*
 * [Data table placeholder: this span of the extracted document was a single,
 * unbroken run of decimal byte values with no surrounding array declaration.
 * The bytes decode to UTF-8 word lists — Chinese, Spanish, Russian, Hindi,
 * and Arabic text followed by common English words and HTML/JS fragments
 * such as "<title>", "&quot;", and "window." — which matches the Brotli
 * static dictionary (RFC 7932, Appendix A) embedded for Brotli decompression
 * support. The raw values are elided here; a short decoding sketch follows.]
 */
97, 105, 115, 109, 115, 99, 114, 105, 112, 116, 32, 97, 108, 116, 101, 114, 101, 100, 39, 93, 41, 59, 10, 32, 32, 104, 97, 115, 32, 116, 104, 101, 117, 110, 99, 108, 101, 97, 114, 69, 118, 101, 110, 116, 39, 44, 98, 111, 116, 104, 32, 105, 110, 110, 111, 116, 32, 97, 108, 108, 10, 10, 60, 33, 45, 45, 32, 112, 108, 97, 99, 105, 110, 103, 104, 97, 114, 100, 32, 116, 111, 32, 99, 101, 110, 116, 101, 114, 115, 111, 114, 116, 32, 111, 102, 99, 108, 105, 101, 110, 116, 115, 115, 116, 114, 101, 101, 116, 115, 66, 101, 114, 110, 97, 114, 100, 97, 115, 115, 101, 114, 116, 115, 116, 101, 110, 100, 32, 116, 111, 102, 97, 110, 116, 97, 115, 121, 100, 111, 119, 110, 32, 105, 110, 104, 97, 114, 98, 111, 117, 114, 70, 114, 101, 101, 100, 111, 109, 106, 101, 119, 101, 108, 114, 121, 47, 97, 98, 111, 117, 116, 46, 46, 115, 101, 97, 114, 99, 104, 108, 101, 103, 101, 110, 100, 115, 105, 115, 32, 109, 97, 100, 101, 109, 111, 100, 101, 114, 110, 32, 111, 110, 108, 121, 32, 111, 110, 111, 110, 108, 121, 32, 116, 111, 105, 109, 97, 103, 101, 34, 32, 108, 105, 110, 101, 97, 114, 32, 112, 97, 105, 110, 116, 101, 114, 97, 110, 100, 32, 110, 111, 116, 114, 97, 114, 101, 108, 121, 32, 97, 99, 114, 111, 110, 121, 109, 100, 101, 108, 105, 118, 101, 114, 115, 104, 111, 114, 116, 101, 114, 48, 48, 38, 97, 109, 112, 59, 97, 115, 32, 109, 97, 110, 121, 119, 105, 100, 116, 104, 61, 34, 47, 42, 32, 60, 33, 91, 67, 116, 105, 116, 108, 101, 32, 61, 111, 102, 32, 116, 104, 101, 32, 108, 111, 119, 101, 115, 116, 32, 112, 105, 99, 107, 101, 100, 32, 101, 115, 99, 97, 112, 101, 100, 117, 115, 101, 115, 32, 111, 102, 112, 101, 111, 112, 108, 101, 115, 32, 80, 117, 98, 108, 105, 99, 77, 97, 116, 116, 104, 101, 119, 116, 97, 99, 116, 105, 99, 115, 100, 97, 109, 97, 103, 101, 100, 119, 97, 121, 32, 102, 111, 114, 108, 97, 119, 115, 32, 111, 102, 101, 97, 115, 121, 32, 116, 111, 32, 119, 105, 110, 100, 111, 119, 115, 116, 114, 111, 110, 103, 32, 32, 115, 105, 109, 112, 108, 101, 125, 99, 97, 116, 99, 104, 40, 115, 101, 118, 101, 110, 116, 104, 105, 110, 102, 111, 98, 111, 120, 119, 101, 110, 116, 32, 116, 111, 112, 97, 105, 110, 116, 101, 100, 99, 105, 116, 105, 122, 101, 110, 73, 32, 100, 111, 110, 39, 116, 114, 101, 116, 114, 101, 97, 116, 46, 32, 83, 111, 109, 101, 32, 119, 119, 46, 34, 41, 59, 10, 98, 111, 109, 98, 105, 110, 103, 109, 97, 105, 108, 116, 111, 58, 109, 97, 100, 101, 32, 105, 110, 46, 32, 77, 97, 110, 121, 32, 99, 97, 114, 114, 105, 101, 115, 124, 124, 123, 125, 59, 119, 105, 119, 111, 114, 107, 32, 111, 102, 115, 121, 110, 111, 110, 121, 109, 100, 101, 102, 101, 97, 116, 115, 102, 97, 118, 111, 114, 101, 100, 111, 112, 116, 105, 99, 97, 108, 112, 97, 103, 101, 84, 114, 97, 117, 110, 108, 101, 115, 115, 32, 115, 101, 110, 100, 105, 110, 103, 108, 101, 102, 116, 34, 62, 60, 99, 111, 109, 83, 99, 111, 114, 65, 108, 108, 32, 116, 104, 101, 106, 81, 117, 101, 114, 121, 46, 116, 111, 117, 114, 105, 115, 116, 67, 108, 97, 115, 115, 105, 99, 102, 97, 108, 115, 101, 34, 32, 87, 105, 108, 104, 101, 108, 109, 115, 117, 98, 117, 114, 98, 115, 103, 101, 110, 117, 105, 110, 101, 98, 105, 115, 104, 111, 112, 115, 46, 115, 112, 108, 105, 116, 40, 103, 108, 111, 98, 97, 108, 32, 102, 111, 108, 108, 111, 119, 115, 98, 111, 100, 121, 32, 111, 102, 110, 111, 109, 105, 110, 97, 108, 67, 111, 110, 116, 97, 99, 116, 115, 101, 99, 117, 108, 97, 114, 108, 101, 102, 116, 32, 116, 111, 99, 104, 105, 101, 102, 108, 121, 45, 104, 105, 100, 100, 101, 110, 45, 98, 97, 110, 110, 101, 114, 60, 47, 108, 105, 62, 10, 10, 46, 32, 87, 104, 101, 110, 32, 
105, 110, 32, 98, 111, 116, 104, 100, 105, 115, 109, 105, 115, 115, 69, 120, 112, 108, 111, 114, 101, 97, 108, 119, 97, 121, 115, 32, 118, 105, 97, 32, 116, 104, 101, 115, 112, 97, 195, 177, 111, 108, 119, 101, 108, 102, 97, 114, 101, 114, 117, 108, 105, 110, 103, 32, 97, 114, 114, 97, 110, 103, 101, 99, 97, 112, 116, 97, 105, 110, 104, 105, 115, 32, 115, 111, 110, 114, 117, 108, 101, 32, 111, 102, 104, 101, 32, 116, 111, 111, 107, 105, 116, 115, 101, 108, 102, 44, 61, 48, 38, 97, 109, 112, 59, 40, 99, 97, 108, 108, 101, 100, 115, 97, 109, 112, 108, 101, 115, 116, 111, 32, 109, 97, 107, 101, 99, 111, 109, 47, 112, 97, 103, 77, 97, 114, 116, 105, 110, 32, 75, 101, 110, 110, 101, 100, 121, 97, 99, 99, 101, 112, 116, 115, 102, 117, 108, 108, 32, 111, 102, 104, 97, 110, 100, 108, 101, 100, 66, 101, 115, 105, 100, 101, 115, 47, 47, 45, 45, 62, 60, 47, 97, 98, 108, 101, 32, 116, 111, 116, 97, 114, 103, 101, 116, 115, 101, 115, 115, 101, 110, 99, 101, 104, 105, 109, 32, 116, 111, 32, 105, 116, 115, 32, 98, 121, 32, 99, 111, 109, 109, 111, 110, 46, 109, 105, 110, 101, 114, 97, 108, 116, 111, 32, 116, 97, 107, 101, 119, 97, 121, 115, 32, 116, 111, 115, 46, 111, 114, 103, 47, 108, 97, 100, 118, 105, 115, 101, 100, 112, 101, 110, 97, 108, 116, 121, 115, 105, 109, 112, 108, 101, 58, 105, 102, 32, 116, 104, 101, 121, 76, 101, 116, 116, 101, 114, 115, 97, 32, 115, 104, 111, 114, 116, 72, 101, 114, 98, 101, 114, 116, 115, 116, 114, 105, 107, 101, 115, 32, 103, 114, 111, 117, 112, 115, 46, 108, 101, 110, 103, 116, 104, 102, 108, 105, 103, 104, 116, 115, 111, 118, 101, 114, 108, 97, 112, 115, 108, 111, 119, 108, 121, 32, 108, 101, 115, 115, 101, 114, 32, 115, 111, 99, 105, 97, 108, 32, 60, 47, 112, 62, 10, 9, 9, 105, 116, 32, 105, 110, 116, 111, 114, 97, 110, 107, 101, 100, 32, 114, 97, 116, 101, 32, 111, 102, 117, 108, 62, 13, 10, 32, 32, 97, 116, 116, 101, 109, 112, 116, 112, 97, 105, 114, 32, 111, 102, 109, 97, 107, 101, 32, 105, 116, 75, 111, 110, 116, 97, 107, 116, 65, 110, 116, 111, 110, 105, 111, 104, 97, 118, 105, 110, 103, 32, 114, 97, 116, 105, 110, 103, 115, 32, 97, 99, 116, 105, 118, 101, 115, 116, 114, 101, 97, 109, 115, 116, 114, 97, 112, 112, 101, 100, 34, 41, 46, 99, 115, 115, 40, 104, 111, 115, 116, 105, 108, 101, 108, 101, 97, 100, 32, 116, 111, 108, 105, 116, 116, 108, 101, 32, 103, 114, 111, 117, 112, 115, 44, 80, 105, 99, 116, 117, 114, 101, 45, 45, 62, 13, 10, 13, 10, 32, 114, 111, 119, 115, 61, 34, 32, 111, 98, 106, 101, 99, 116, 105, 110, 118, 101, 114, 115, 101, 60, 102, 111, 111, 116, 101, 114, 67, 117, 115, 116, 111, 109, 86, 62, 60, 92, 47, 115, 99, 114, 115, 111, 108, 118, 105, 110, 103, 67, 104, 97, 109, 98, 101, 114, 115, 108, 97, 118, 101, 114, 121, 119, 111, 117, 110, 100, 101, 100, 119, 104, 101, 114, 101, 97, 115, 33, 61, 32, 39, 117, 110, 100, 102, 111, 114, 32, 97, 108, 108, 112, 97, 114, 116, 108, 121, 32, 45, 114, 105, 103, 104, 116, 58, 65, 114, 97, 98, 105, 97, 110, 98, 97, 99, 107, 101, 100, 32, 99, 101, 110, 116, 117, 114, 121, 117, 110, 105, 116, 32, 111, 102, 109, 111, 98, 105, 108, 101, 45, 69, 117, 114, 111, 112, 101, 44, 105, 115, 32, 104, 111, 109, 101, 114, 105, 115, 107, 32, 111, 102, 100, 101, 115, 105, 114, 101, 100, 67, 108, 105, 110, 116, 111, 110, 99, 111, 115, 116, 32, 111, 102, 97, 103, 101, 32, 111, 102, 32, 98, 101, 99, 111, 109, 101, 32, 110, 111, 110, 101, 32, 111, 102, 112, 38, 113, 117, 111, 116, 59, 77, 105, 100, 100, 108, 101, 32, 101, 97, 100, 39, 41, 91, 48, 67, 114, 105, 116, 105, 99, 115, 115, 116, 117, 100, 105, 111, 115, 62, 38, 99, 
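 *
 * This tokenizes each string using the given d_delimiter and replaces any
 * token shorter than min_token_length with the d_replacement string.
 *
 * Like replace_tokens_fn above, this is called once to compute the size of
 * each output string and then a second time to fill in the allocated output
 * buffer for each string.
 *
 * Note: the definition below is a minimal sketch built on the
 * base_token_replacer_fn interface above; member names are illustrative.
 */
struct remove_small_tokens_fn : base_token_replacer_fn {
  cudf::size_type min_token_length;       ///< minimum character count for a token to be kept
  cudf::string_view const d_replacement;  ///< replacement for small tokens

  remove_small_tokens_fn(cudf::column_device_view const& d_strings,
                         cudf::string_view const& d_delimiter,
                         cudf::size_type min_token_length,
                         cudf::string_view const& d_replacement)
    : base_token_replacer_fn{d_strings, d_delimiter},
      min_token_length{min_token_length},
      d_replacement{d_replacement}
  {
  }

  __device__ void operator()(cudf::size_type idx)
  {
    // a token is replaced when its character count is below the minimum
    process_string(idx, [this] __device__(cudf::string_view const& token) {
      return replace_result{token.length() < min_token_length, d_replacement};
    });
  }
};

}  // namespace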
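// detail APIs

// The definitions below are a minimal sketch of the detail-layer entry
// points. They assume cudf::strings::detail::make_strings_children (from
// strings_children.cuh included above) calls the given functor twice --
// once to compute the output sizes and once to fill the output buffer --
// and returns the offsets and chars children columns.

std::unique_ptr<cudf::column> replace_tokens(cudf::strings_column_view const& strings,
                                             cudf::strings_column_view const& targets,
                                             cudf::strings_column_view const& replacements,
                                             cudf::string_scalar const& delimiter,
                                             rmm::cuda_stream_view stream,
                                             rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(!targets.has_nulls(), "Parameter targets must not have nulls");
  CUDF_EXPECTS(!replacements.has_nulls(), "Parameter replacements must not have nulls");
  if (replacements.size() != 1) {
    CUDF_EXPECTS(replacements.size() == targets.size(),
                 "Parameter targets and replacements must be the same size");
  }
  CUDF_EXPECTS(delimiter.is_valid(stream), "Parameter delimiter must be valid");

  if (strings.is_empty()) { return cudf::make_empty_column(cudf::type_id::STRING); }

  auto const d_strings      = cudf::column_device_view::create(strings.parent(), stream);
  auto const d_targets      = cudf::column_device_view::create(targets.parent(), stream);
  auto const d_replacements = cudf::column_device_view::create(replacements.parent(), stream);
  auto const d_delimiter    = cudf::string_view{delimiter.data(), delimiter.size()};

  replace_tokens_fn replacer{*d_strings,
                             d_delimiter,
                             d_targets->begin<cudf::string_view>(),
                             d_targets->end<cudf::string_view>(),
                             *d_replacements};

  // build offsets and chars children by invoking the functor for sizing and output
  auto children =
    cudf::strings::detail::make_strings_children(replacer, strings.size(), stream, mr);

  return cudf::make_strings_column(strings.size(),
                                   std::move(children.first),
                                   std::move(children.second),
                                   strings.null_count(),
                                   cudf::detail::copy_bitmask(strings.parent(), stream, mr));
}

std::unique_ptr<cudf::column> filter_tokens(cudf::strings_column_view const& strings,
                                            cudf::size_type min_token_length,
                                            cudf::string_scalar const& replacement,
                                            cudf::string_scalar const& delimiter,
                                            rmm::cuda_stream_view stream,
                                            rmm::mr::device_memory_resource* mr)
{
  CUDF_EXPECTS(replacement.is_valid(stream), "Parameter replacement must be valid");
  CUDF_EXPECTS(delimiter.is_valid(stream), "Parameter delimiter must be valid");

  if (strings.is_empty()) { return cudf::make_empty_column(cudf::type_id::STRING); }

  auto const d_strings     = cudf::column_device_view::create(strings.parent(), stream);
  auto const d_replacement = cudf::string_view{replacement.data(), replacement.size()};
  auto const d_delimiter   = cudf::string_view{delimiter.data(), delimiter.size()};

  remove_small_tokens_fn filterer{*d_strings, d_delimiter, min_token_length, d_replacement};

  // build offsets and chars children by invoking the functor for sizing and output
  auto children =
    cudf::strings::detail::make_strings_children(filterer, strings.size(), stream, mr);

  return cudf::make_strings_column(strings.size(),
                                   std::move(children.first),
                                   std::move(children.second),
                                   strings.null_count(),
                                   cudf::detail::copy_bitmask(strings.parent(), stream, mr));
}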
111, 121, 101, 99, 116, 111, 112, 114, 111, 100, 117, 99, 116, 111, 112, 195, 186, 98, 108, 105, 99, 111, 110, 111, 115, 111, 116, 114, 111, 115, 104, 105, 115, 116, 111, 114, 105, 97, 112, 114, 101, 115, 101, 110, 116, 101, 109, 105, 108, 108, 111, 110, 101, 115, 109, 101, 100, 105, 97, 110, 116, 101, 112, 114, 101, 103, 117, 110, 116, 97, 97, 110, 116, 101, 114, 105, 111, 114, 114, 101, 99, 117, 114, 115, 111, 115, 112, 114, 111, 98, 108, 101, 109, 97, 115, 97, 110, 116, 105, 97, 103, 111, 110, 117, 101, 115, 116, 114, 111, 115, 111, 112, 105, 110, 105, 195, 179, 110, 105, 109, 112, 114, 105, 109, 105, 114, 109, 105, 101, 110, 116, 114, 97, 115, 97, 109, 195, 169, 114, 105, 99, 97, 118, 101, 110, 100, 101, 100, 111, 114, 115, 111, 99, 105, 101, 100, 97, 100, 114, 101, 115, 112, 101, 99, 116, 111, 114, 101, 97, 108, 105, 122, 97, 114, 114, 101, 103, 105, 115, 116, 114, 111, 112, 97, 108, 97, 98, 114, 97, 115, 105, 110, 116, 101, 114, 195, 169, 115, 101, 110, 116, 111, 110, 99, 101, 115, 101, 115, 112, 101, 99, 105, 97, 108, 109, 105, 101, 109, 98, 114, 111, 115, 114, 101, 97, 108, 105, 100, 97, 100, 99, 195, 179, 114, 100, 111, 98, 97, 122, 97, 114, 97, 103, 111, 122, 97, 112, 195, 161, 103, 105, 110, 97, 115, 115, 111, 99, 105, 97, 108, 101, 115, 98, 108, 111, 113, 117, 101, 97, 114, 103, 101, 115, 116, 105, 195, 179, 110, 97, 108, 113, 117, 105, 108, 101, 114, 115, 105, 115, 116, 101, 109, 97, 115, 99, 105, 101, 110, 99, 105, 97, 115, 99, 111, 109, 112, 108, 101, 116, 111, 118, 101, 114, 115, 105, 195, 179, 110, 99, 111, 109, 112, 108, 101, 116, 97, 101, 115, 116, 117, 100, 105, 111, 115, 112, 195, 186, 98, 108, 105, 99, 97, 111, 98, 106, 101, 116, 105, 118, 111, 97, 108, 105, 99, 97, 110, 116, 101, 98, 117, 115, 99, 97, 100, 111, 114, 99, 97, 110, 116, 105, 100, 97, 100, 101, 110, 116, 114, 97, 100, 97, 115, 97, 99, 99, 105, 111, 110, 101, 115, 97, 114, 99, 104, 105, 118, 111, 115, 115, 117, 112, 101, 114, 105, 111, 114, 109, 97, 121, 111, 114, 195, 173, 97, 97, 108, 101, 109, 97, 110, 105, 97, 102, 117, 110, 99, 105, 195, 179, 110, 195, 186, 108, 116, 105, 109, 111, 115, 104, 97, 99, 105, 101, 110, 100, 111, 97, 113, 117, 101, 108, 108, 111, 115, 101, 100, 105, 99, 105, 195, 179, 110, 102, 101, 114, 110, 97, 110, 100, 111, 97, 109, 98, 105, 101, 110, 116, 101, 102, 97, 99, 101, 98, 111, 111, 107, 110, 117, 101, 115, 116, 114, 97, 115, 99, 108, 105, 101, 110, 116, 101, 115, 112, 114, 111, 99, 101, 115, 111, 115, 98, 97, 115, 116, 97, 110, 116, 101, 112, 114, 101, 115, 101, 110, 116, 97, 114, 101, 112, 111, 114, 116, 97, 114, 99, 111, 110, 103, 114, 101, 115, 111, 112, 117, 98, 108, 105, 99, 97, 114, 99, 111, 109, 101, 114, 99, 105, 111, 99, 111, 110, 116, 114, 97, 116, 111, 106, 195, 179, 118, 101, 110, 101, 115, 100, 105, 115, 116, 114, 105, 116, 111, 116, 195, 169, 99, 110, 105, 99, 97, 99, 111, 110, 106, 117, 110, 116, 111, 101, 110, 101, 114, 103, 195, 173, 97, 116, 114, 97, 98, 97, 106, 97, 114, 97, 115, 116, 117, 114, 105, 97, 115, 114, 101, 99, 105, 101, 110, 116, 101, 117, 116, 105, 108, 105, 122, 97, 114, 98, 111, 108, 101, 116, 195, 173, 110, 115, 97, 108, 118, 97, 100, 111, 114, 99, 111, 114, 114, 101, 99, 116, 97, 116, 114, 97, 98, 97, 106, 111, 115, 112, 114, 105, 109, 101, 114, 111, 115, 110, 101, 103, 111, 99, 105, 111, 115, 108, 105, 98, 101, 114, 116, 97, 100, 100, 101, 116, 97, 108, 108, 101, 115, 112, 97, 110, 116, 97, 108, 108, 97, 112, 114, 195, 179, 120, 105, 109, 111, 97, 108, 109, 101, 114, 195, 173, 97, 97, 110, 105, 109, 97, 108, 101, 115, 113, 117, 105, 195, 
169, 110, 101, 115, 99, 111, 114, 97, 122, 195, 179, 110, 115, 101, 99, 99, 105, 195, 179, 110, 98, 117, 115, 99, 97, 110, 100, 111, 111, 112, 99, 105, 111, 110, 101, 115, 101, 120, 116, 101, 114, 105, 111, 114, 99, 111, 110, 99, 101, 112, 116, 111, 116, 111, 100, 97, 118, 195, 173, 97, 103, 97, 108, 101, 114, 195, 173, 97, 101, 115, 99, 114, 105, 98, 105, 114, 109, 101, 100, 105, 99, 105, 110, 97, 108, 105, 99, 101, 110, 99, 105, 97, 99, 111, 110, 115, 117, 108, 116, 97, 97, 115, 112, 101, 99, 116, 111, 115, 99, 114, 195, 173, 116, 105, 99, 97, 100, 195, 179, 108, 97, 114, 101, 115, 106, 117, 115, 116, 105, 99, 105, 97, 100, 101, 98, 101, 114, 195, 161, 110, 112, 101, 114, 195, 173, 111, 100, 111, 110, 101, 99, 101, 115, 105, 116, 97, 109, 97, 110, 116, 101, 110, 101, 114, 112, 101, 113, 117, 101, 195, 177, 111, 114, 101, 99, 105, 98, 105, 100, 97, 116, 114, 105, 98, 117, 110, 97, 108, 116, 101, 110, 101, 114, 105, 102, 101, 99, 97, 110, 99, 105, 195, 179, 110, 99, 97, 110, 97, 114, 105, 97, 115, 100, 101, 115, 99, 97, 114, 103, 97, 100, 105, 118, 101, 114, 115, 111, 115, 109, 97, 108, 108, 111, 114, 99, 97, 114, 101, 113, 117, 105, 101, 114, 101, 116, 195, 169, 99, 110, 105, 99, 111, 100, 101, 98, 101, 114, 195, 173, 97, 118, 105, 118, 105, 101, 110, 100, 97, 102, 105, 110, 97, 110, 122, 97, 115, 97, 100, 101, 108, 97, 110, 116, 101, 102, 117, 110, 99, 105, 111, 110, 97, 99, 111, 110, 115, 101, 106, 111, 115, 100, 105, 102, 195, 173, 99, 105, 108, 99, 105, 117, 100, 97, 100, 101, 115, 97, 110, 116, 105, 103, 117, 97, 115, 97, 118, 97, 110, 122, 97, 100, 97, 116, 195, 169, 114, 109, 105, 110, 111, 117, 110, 105, 100, 97, 100, 101, 115, 115, 195, 161, 110, 99, 104, 101, 122, 99, 97, 109, 112, 97, 195, 177, 97, 115, 111, 102, 116, 111, 110, 105, 99, 114, 101, 118, 105, 115, 116, 97, 115, 99, 111, 110, 116, 105, 101, 110, 101, 115, 101, 99, 116, 111, 114, 101, 115, 109, 111, 109, 101, 110, 116, 111, 115, 102, 97, 99, 117, 108, 116, 97, 100, 99, 114, 195, 169, 100, 105, 116, 111, 100, 105, 118, 101, 114, 115, 97, 115, 115, 117, 112, 117, 101, 115, 116, 111, 102, 97, 99, 116, 111, 114, 101, 115, 115, 101, 103, 117, 110, 100, 111, 115, 112, 101, 113, 117, 101, 195, 177, 97, 208, 179, 208, 190, 208, 180, 208, 176, 208, 181, 209, 129, 208, 187, 208, 184, 208, 181, 209, 129, 209, 130, 209, 140, 208, 177, 209, 139, 208, 187, 208, 190, 208, 177, 209, 139, 209, 130, 209, 140, 209, 141, 209, 130, 208, 190, 208, 188, 208, 149, 209, 129, 208, 187, 208, 184, 209, 130, 208, 190, 208, 179, 208, 190, 208, 188, 208, 181, 208, 189, 209, 143, 208, 178, 209, 129, 208, 181, 209, 133, 209, 141, 209, 130, 208, 190, 208, 185, 208, 180, 208, 176, 208, 182, 208, 181, 208, 177, 209, 139, 208, 187, 208, 184, 208, 179, 208, 190, 208, 180, 209, 131, 208, 180, 208, 181, 208, 189, 209, 140, 209, 141, 209, 130, 208, 190, 209, 130, 208, 177, 209, 139, 208, 187, 208, 176, 209, 129, 208, 181, 208, 177, 209, 143, 208, 190, 208, 180, 208, 184, 208, 189, 209, 129, 208, 181, 208, 177, 208, 181, 208, 189, 208, 176, 208, 180, 208, 190, 209, 129, 208, 176, 208, 185, 209, 130, 209, 132, 208, 190, 209, 130, 208, 190, 208, 189, 208, 181, 208, 179, 208, 190, 209, 129, 208, 178, 208, 190, 208, 184, 209, 129, 208, 178, 208, 190, 208, 185, 208, 184, 208, 179, 209, 128, 209, 139, 209, 130, 208, 190, 208, 182, 208, 181, 208, 178, 209, 129, 208, 181, 208, 188, 209, 129, 208, 178, 208, 190, 209, 142, 208, 187, 208, 184, 209, 136, 209, 140, 209, 141, 209, 130, 208, 184, 209, 133, 208, 191, 208, 190, 208, 186, 208, 176, 208, 180, 208, 189, 208, 
181, 208, 185, 208, 180, 208, 190, 208, 188, 208, 176, 208, 188, 208, 184, 209, 128, 208, 176, 208, 187, 208, 184, 208, 177, 208, 190, 209, 130, 208, 181, 208, 188, 209, 131, 209, 133, 208, 190, 209, 130, 209, 143, 208, 180, 208, 178, 209, 131, 209, 133, 209, 129, 208, 181, 209, 130, 208, 184, 208, 187, 209, 142, 208, 180, 208, 184, 208, 180, 208, 181, 208, 187, 208, 190, 208, 188, 208, 184, 209, 128, 208, 181, 209, 130, 208, 181, 208, 177, 209, 143, 209, 129, 208, 178, 208, 190, 208, 181, 208, 178, 208, 184, 208, 180, 208, 181, 209, 135, 208, 181, 208, 179, 208, 190, 209, 141, 209, 130, 208, 184, 208, 188, 209, 129, 209, 135, 208, 181, 209, 130, 209, 130, 208, 181, 208, 188, 209, 139, 209, 134, 208, 181, 208, 189, 209, 139, 209, 129, 209, 130, 208, 176, 208, 187, 208, 178, 208, 181, 208, 180, 209, 140, 209, 130, 208, 181, 208, 188, 208, 181, 208, 178, 208, 190, 208, 180, 209, 139, 209, 130, 208, 181, 208, 177, 208, 181, 208, 178, 209, 139, 209, 136, 208, 181, 208, 189, 208, 176, 208, 188, 208, 184, 209, 130, 208, 184, 208, 191, 208, 176, 209, 130, 208, 190, 208, 188, 209, 131, 208, 191, 209, 128, 208, 176, 208, 178, 208, 187, 208, 184, 209, 134, 208, 176, 208, 190, 208, 180, 208, 189, 208, 176, 208, 179, 208, 190, 208, 180, 209, 139, 208, 183, 208, 189, 208, 176, 209, 142, 208, 188, 208, 190, 208, 179, 209, 131, 208, 180, 209, 128, 209, 131, 208, 179, 208, 178, 209, 129, 208, 181, 208, 185, 208, 184, 208, 180, 208, 181, 209, 130, 208, 186, 208, 184, 208, 189, 208, 190, 208, 190, 208, 180, 208, 189, 208, 190, 208, 180, 208, 181, 208, 187, 208, 176, 208, 180, 208, 181, 208, 187, 208, 181, 209, 129, 209, 128, 208, 190, 208, 186, 208, 184, 209, 142, 208, 189, 209, 143, 208, 178, 208, 181, 209, 129, 209, 140, 208, 149, 209, 129, 209, 130, 209, 140, 209, 128, 208, 176, 208, 183, 208, 176, 208, 189, 208, 176, 209, 136, 208, 184, 216, 167, 217, 132, 217, 132, 217, 135, 216, 167, 217, 132, 216, 170, 217, 138, 216, 172, 217, 133, 217, 138, 216, 185, 216, 174, 216, 167, 216, 181, 216, 169, 216, 167, 217, 132, 216, 176, 217, 138, 216, 185, 217, 132, 217, 138, 217, 135, 216, 172, 216, 175, 217, 138, 216, 175, 216, 167, 217, 132, 216, 162, 217, 134, 216, 167, 217, 132, 216, 177, 216, 175, 216, 170, 216, 173, 217, 131, 217, 133, 216, 181, 217, 129, 216, 173, 216, 169, 217, 131, 216, 167, 217, 134, 216, 170, 216, 167, 217, 132, 217, 132, 217, 138, 217, 138, 217, 131, 217, 136, 217, 134, 216, 180, 216, 168, 217, 131, 216, 169, 217, 129, 217, 138, 217, 135, 216, 167, 216, 168, 217, 134, 216, 167, 216, 170, 216, 173, 217, 136, 216, 167, 216, 161, 216, 163, 217, 131, 216, 171, 216, 177, 216, 174, 217, 132, 216, 167, 217, 132, 216, 167, 217, 132, 216, 173, 216, 168, 216, 175, 217, 132, 217, 138, 217, 132, 216, 175, 216, 177, 217, 136, 216, 179, 216, 167, 216, 182, 216, 186, 216, 183, 216, 170, 217, 131, 217, 136, 217, 134, 217, 135, 217, 134, 216, 167, 217, 131, 216, 179, 216, 167, 216, 173, 216, 169, 217, 134, 216, 167, 216, 175, 217, 138, 216, 167, 217, 132, 216, 183, 216, 168, 216, 185, 217, 132, 217, 138, 217, 131, 216, 180, 217, 131, 216, 177, 216, 167, 217, 138, 217, 133, 217, 131, 217, 134, 217, 133, 217, 134, 217, 135, 216, 167, 216, 180, 216, 177, 217, 131, 216, 169, 216, 177, 216, 166, 217, 138, 216, 179, 217, 134, 216, 180, 217, 138, 216, 183, 217, 133, 216, 167, 216, 176, 216, 167, 216, 167, 217, 132, 217, 129, 217, 134, 216, 180, 216, 168, 216, 167, 216, 168, 216, 170, 216, 185, 216, 168, 216, 177, 216, 177, 216, 173, 217, 133, 216, 169, 217, 131, 216, 167, 217, 129, 216, 169, 217, 138, 217, 130, 
217, 136, 217, 132, 217, 133, 216, 177, 217, 131, 216, 178, 217, 131, 217, 132, 217, 133, 216, 169, 216, 163, 216, 173, 217, 133, 216, 175, 217, 130, 217, 132, 216, 168, 217, 138, 217, 138, 216, 185, 217, 134, 217, 138, 216, 181, 217, 136, 216, 177, 216, 169, 216, 183, 216, 177, 217, 138, 217, 130, 216, 180, 216, 167, 216, 177, 217, 131, 216, 172, 217, 136, 216, 167, 217, 132, 216, 163, 216, 174, 216, 177, 217, 137, 217, 133, 216, 185, 217, 134, 216, 167, 216, 167, 216, 168, 216, 173, 216, 171, 216, 185, 216, 177, 217, 136, 216, 182, 216, 168, 216, 180, 217, 131, 217, 132, 217, 133, 216, 179, 216, 172, 217, 132, 216, 168, 217, 134, 216, 167, 217, 134, 216, 174, 216, 167, 217, 132, 216, 175, 217, 131, 216, 170, 216, 167, 216, 168, 217, 131, 217, 132, 217, 138, 216, 169, 216, 168, 216, 175, 217, 136, 217, 134, 216, 163, 217, 138, 216, 182, 216, 167, 217, 138, 217, 136, 216, 172, 216, 175, 217, 129, 216, 177, 217, 138, 217, 130, 217, 131, 216, 170, 216, 168, 216, 170, 216, 163, 217, 129, 216, 182, 217, 132, 217, 133, 216, 183, 216, 168, 216, 174, 216, 167, 217, 131, 216, 171, 216, 177, 216, 168, 216, 167, 216, 177, 217, 131, 216, 167, 217, 129, 216, 182, 217, 132, 216, 167, 216, 173, 217, 132, 217, 137, 217, 134, 217, 129, 216, 179, 217, 135, 216, 163, 217, 138, 216, 167, 217, 133, 216, 177, 216, 175, 217, 136, 216, 175, 216, 163, 217, 134, 217, 135, 216, 167, 216, 175, 217, 138, 217, 134, 216, 167, 216, 167, 217, 132, 216, 167, 217, 134, 217, 133, 216, 185, 216, 177, 216, 182, 216, 170, 216, 185, 217, 132, 217, 133, 216, 175, 216, 167, 216, 174, 217, 132, 217, 133, 217, 133, 217, 131, 217, 134, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 2, 0, 2, 0, 2, 0, 2, 0, 4, 0, 4, 0, 4, 0, 4, 0, 0, 1, 2, 3, 4, 5, 6, 7, 7, 6, 5, 4, 3, 2, 1, 0, 8, 9, 10, 11, 12, 13, 14, 15, 15, 14, 13, 12, 11, 10, 9, 8, 16, 17, 18, 19, 20, 21, 22, 23, 23, 22, 21, 20, 19, 18, 17, 16, 24, 25, 26, 27, 28, 29, 30, 31, 31, 30, 29, 28, 27, 26, 25, 24, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 1, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 255, 255, 0, 1, 0, 0, 0, 1, 0, 0, 255, 255, 0, 1, 0, 0, 0, 8, 0, 8, 0, 8, 0, 8, 0, 0, 0, 1, 0, 2, 0, 3, 0, 4, 0, 5, 0, 6, 0, 7, 114, 101, 115, 111, 117, 114, 99, 101, 115, 99, 111, 117, 110, 116, 114, 105, 101, 115, 113, 117, 101, 115, 116, 105, 111, 110, 115, 101, 113, 117, 105, 112, 109, 101, 110, 116, 99, 111, 109, 109, 117, 110, 105, 116, 121, 97, 118, 97, 105, 108, 97, 98, 108, 101, 104, 105, 103, 104, 108, 105, 103, 104, 116, 68, 84, 68, 47, 120, 104, 116, 109, 108, 109, 97, 114, 107, 101, 116, 105, 110, 103, 107, 110, 111, 119, 108, 101, 100, 103, 101, 115, 111, 109, 101, 116, 104, 105, 110, 103, 99, 111, 110, 116, 97, 105, 110, 101, 114, 100, 105, 114, 101, 99, 116, 105, 111, 110, 115, 117, 98, 115, 99, 114, 105, 98, 101, 97, 100, 118, 101, 114, 116, 105, 115, 101, 99, 104, 97, 114, 97, 99, 116, 101, 114, 34, 32, 118, 97, 108, 117, 101, 61, 34, 60, 47, 115, 101, 108, 101, 99, 116, 62, 65, 117, 115, 116, 114, 97, 108, 105, 97, 34, 32, 99, 108, 97, 115, 115, 61, 34, 115, 105, 116, 117, 97, 116, 105, 111, 110, 97, 117, 116, 104, 111, 114, 105, 116, 121, 102, 111, 108, 108, 111, 119, 105, 110, 103, 112, 114, 105, 109, 97, 114, 105, 108, 121, 111, 112, 101, 114, 97, 116, 105, 111, 110, 99, 104, 97, 108, 108, 101, 110, 103, 101, 100, 101, 118, 101, 108, 111, 112, 101, 100, 97, 110, 111, 110, 121, 109, 111, 117, 115, 102, 117, 110, 99, 116, 105, 111, 110, 32, 102, 117, 110, 99, 116, 105, 111, 110, 115, 99, 111, 109, 112, 97, 110, 105, 101, 
115, 115, 116, 114, 117, 99, 116, 117, 114, 101, 97, 103, 114, 101, 101, 109, 101, 110, 116, 34, 32, 116, 105, 116, 108, 101, 61, 34, 112, 111, 116, 101, 110, 116, 105, 97, 108, 101, 100, 117, 99, 97, 116, 105, 111, 110, 97, 114, 103, 117, 109, 101, 110, 116, 115, 115, 101, 99, 111, 110, 100, 97, 114, 121, 99, 111, 112, 121, 114, 105, 103, 104, 116, 108, 97, 110, 103, 117, 97, 103, 101, 115, 101, 120, 99, 108, 117, 115, 105, 118, 101, 99, 111, 110, 100, 105, 116, 105, 111, 110, 60, 47, 102, 111, 114, 109, 62, 13, 10, 115, 116, 97, 116, 101, 109, 101, 110, 116, 97, 116, 116, 101, 110, 116, 105, 111, 110, 66, 105, 111, 103, 114, 97, 112, 104, 121, 125, 32, 101, 108, 115, 101, 32, 123, 10, 115, 111, 108, 117, 116, 105, 111, 110, 115, 119, 104, 101, 110, 32, 116, 104, 101, 32, 65, 110, 97, 108, 121, 116, 105, 99, 115, 116, 101, 109, 112, 108, 97, 116, 101, 115, 100, 97, 110, 103, 101, 114, 111, 117, 115, 115, 97, 116, 101, 108, 108, 105, 116, 101, 100, 111, 99, 117, 109, 101, 110, 116, 115, 112, 117, 98, 108, 105, 115, 104, 101, 114, 105, 109, 112, 111, 114, 116, 97, 110, 116, 112, 114, 111, 116, 111, 116, 121, 112, 101, 105, 110, 102, 108, 117, 101, 110, 99, 101, 38, 114, 97, 113, 117, 111, 59, 60, 47, 101, 102, 102, 101, 99, 116, 105, 118, 101, 103, 101, 110, 101, 114, 97, 108, 108, 121, 116, 114, 97, 110, 115, 102, 111, 114, 109, 98, 101, 97, 117, 116, 105, 102, 117, 108, 116, 114, 97, 110, 115, 112, 111, 114, 116, 111, 114, 103, 97, 110, 105, 122, 101, 100, 112, 117, 98, 108, 105, 115, 104, 101, 100, 112, 114, 111, 109, 105, 110, 101, 110, 116, 117, 110, 116, 105, 108, 32, 116, 104, 101, 116, 104, 117, 109, 98, 110, 97, 105, 108, 78, 97, 116, 105, 111, 110, 97, 108, 32, 46, 102, 111, 99, 117, 115, 40, 41, 59, 111, 118, 101, 114, 32, 116, 104, 101, 32, 109, 105, 103, 114, 97, 116, 105, 111, 110, 97, 110, 110, 111, 117, 110, 99, 101, 100, 102, 111, 111, 116, 101, 114, 34, 62, 10, 101, 120, 99, 101, 112, 116, 105, 111, 110, 108, 101, 115, 115, 32, 116, 104, 97, 110, 101, 120, 112, 101, 110, 115, 105, 118, 101, 102, 111, 114, 109, 97, 116, 105, 111, 110, 102, 114, 97, 109, 101, 119, 111, 114, 107, 116, 101, 114, 114, 105, 116, 111, 114, 121, 110, 100, 105, 99, 97, 116, 105, 111, 110, 99, 117, 114, 114, 101, 110, 116, 108, 121, 99, 108, 97, 115, 115, 78, 97, 109, 101, 99, 114, 105, 116, 105, 99, 105, 115, 109, 116, 114, 97, 100, 105, 116, 105, 111, 110, 101, 108, 115, 101, 119, 104, 101, 114, 101, 65, 108, 101, 120, 97, 110, 100, 101, 114, 97, 112, 112, 111, 105, 110, 116, 101, 100, 109, 97, 116, 101, 114, 105, 97, 108, 115, 98, 114, 111, 97, 100, 99, 97, 115, 116, 109, 101, 110, 116, 105, 111, 110, 101, 100, 97, 102, 102, 105, 108, 105, 97, 116, 101, 60, 47, 111, 112, 116, 105, 111, 110, 62, 116, 114, 101, 97, 116, 109, 101, 110, 116, 100, 105, 102, 102, 101, 114, 101, 110, 116, 47, 100, 101, 102, 97, 117, 108, 116, 46, 80, 114, 101, 115, 105, 100, 101, 110, 116, 111, 110, 99, 108, 105, 99, 107, 61, 34, 98, 105, 111, 103, 114, 97, 112, 104, 121, 111, 116, 104, 101, 114, 119, 105, 115, 101, 112, 101, 114, 109, 97, 110, 101, 110, 116, 70, 114, 97, 110, 195, 167, 97, 105, 115, 72, 111, 108, 108, 121, 119, 111, 111, 100, 101, 120, 112, 97, 110, 115, 105, 111, 110, 115, 116, 97, 110, 100, 97, 114, 100, 115, 60, 47, 115, 116, 121, 108, 101, 62, 10, 114, 101, 100, 117, 99, 116, 105, 111, 110, 68, 101, 99, 101, 109, 98, 101, 114, 32, 112, 114, 101, 102, 101, 114, 114, 101, 100, 67, 97, 109, 98, 114, 105, 100, 103, 101, 111, 112, 112, 111, 110, 101, 110, 116, 115, 66, 117, 115, 105, 110, 101, 115, 115, 
32, 99, 111, 110, 102, 117, 115, 105, 111, 110, 62, 10, 60, 116, 105, 116, 108, 101, 62, 112, 114, 101, 115, 101, 110, 116, 101, 100, 101, 120, 112, 108, 97, 105, 110, 101, 100, 100, 111, 101, 115, 32, 110, 111, 116, 32, 119, 111, 114, 108, 100, 119, 105, 100, 101, 105, 110, 116, 101, 114, 102, 97, 99, 101, 112, 111, 115, 105, 116, 105, 111, 110, 115, 110, 101, 119, 115, 112, 97, 112, 101, 114, 60, 47, 116, 97, 98, 108, 101, 62, 10, 109, 111, 117, 110, 116, 97, 105, 110, 115, 108, 105, 107, 101, 32, 116, 104, 101, 32, 101, 115, 115, 101, 110, 116, 105, 97, 108, 102, 105, 110, 97, 110, 99, 105, 97, 108, 115, 101, 108, 101, 99, 116, 105, 111, 110, 97, 99, 116, 105, 111, 110, 61, 34, 47, 97, 98, 97, 110, 100, 111, 110, 101, 100, 69, 100, 117, 99, 97, 116, 105, 111, 110, 112, 97, 114, 115, 101, 73, 110, 116, 40, 115, 116, 97, 98, 105, 108, 105, 116, 121, 117, 110, 97, 98, 108, 101, 32, 116, 111, 60, 47, 116, 105, 116, 108, 101, 62, 10, 114, 101, 108, 97, 116, 105, 111, 110, 115, 78, 111, 116, 101, 32, 116, 104, 97, 116, 101, 102, 102, 105, 99, 105, 101, 110, 116, 112, 101, 114, 102, 111, 114, 109, 101, 100, 116, 119, 111, 32, 121, 101, 97, 114, 115, 83, 105, 110, 99, 101, 32, 116, 104, 101, 116, 104, 101, 114, 101, 102, 111, 114, 101, 119, 114, 97, 112, 112, 101, 114, 34, 62, 97, 108, 116, 101, 114, 110, 97, 116, 101, 105, 110, 99, 114, 101, 97, 115, 101, 100, 66, 97, 116, 116, 108, 101, 32, 111, 102, 112, 101, 114, 99, 101, 105, 118, 101, 100, 116, 114, 121, 105, 110, 103, 32, 116, 111, 110, 101, 99, 101, 115, 115, 97, 114, 121, 112, 111, 114, 116, 114, 97, 121, 101, 100, 101, 108, 101, 99, 116, 105, 111, 110, 115, 69, 108, 105, 122, 97, 98, 101, 116, 104, 60, 47, 105, 102, 114, 97, 109, 101, 62, 100, 105, 115, 99, 111, 118, 101, 114, 121, 105, 110, 115, 117, 114, 97, 110, 99, 101, 115, 46, 108, 101, 110, 103, 116, 104, 59, 108, 101, 103, 101, 110, 100, 97, 114, 121, 71, 101, 111, 103, 114, 97, 112, 104, 121, 99, 97, 110, 100, 105, 100, 97, 116, 101, 99, 111, 114, 112, 111, 114, 97, 116, 101, 115, 111, 109, 101, 116, 105, 109, 101, 115, 115, 101, 114, 118, 105, 99, 101, 115, 46, 105, 110, 104, 101, 114, 105, 116, 101, 100, 60, 47, 115, 116, 114, 111, 110, 103, 62, 67, 111, 109, 109, 117, 110, 105, 116, 121, 114, 101, 108, 105, 103, 105, 111, 117, 115, 108, 111, 99, 97, 116, 105, 111, 110, 115, 67, 111, 109, 109, 105, 116, 116, 101, 101, 98, 117, 105, 108, 100, 105, 110, 103, 115, 116, 104, 101, 32, 119, 111, 114, 108, 100, 110, 111, 32, 108, 111, 110, 103, 101, 114, 98, 101, 103, 105, 110, 110, 105, 110, 103, 114, 101, 102, 101, 114, 101, 110, 99, 101, 99, 97, 110, 110, 111, 116, 32, 98, 101, 102, 114, 101, 113, 117, 101, 110, 99, 121, 116, 121, 112, 105, 99, 97, 108, 108, 121, 105, 110, 116, 111, 32, 116, 104, 101, 32, 114, 101, 108, 97, 116, 105, 118, 101, 59, 114, 101, 99, 111, 114, 100, 105, 110, 103, 112, 114, 101, 115, 105, 100, 101, 110, 116, 105, 110, 105, 116, 105, 97, 108, 108, 121, 116, 101, 99, 104, 110, 105, 113, 117, 101, 116, 104, 101, 32, 111, 116, 104, 101, 114, 105, 116, 32, 99, 97, 110, 32, 98, 101, 101, 120, 105, 115, 116, 101, 110, 99, 101, 117, 110, 100, 101, 114, 108, 105, 110, 101, 116, 104, 105, 115, 32, 116, 105, 109, 101, 116, 101, 108, 101, 112, 104, 111, 110, 101, 105, 116, 101, 109, 115, 99, 111, 112, 101, 112, 114, 97, 99, 116, 105, 99, 101, 115, 97, 100, 118, 97, 110, 116, 97, 103, 101, 41, 59, 114, 101, 116, 117, 114, 110, 32, 70, 111, 114, 32, 111, 116, 104, 101, 114, 112, 114, 111, 118, 105, 100, 105, 110, 103, 100, 101, 109, 111, 99, 114, 97, 99, 121, 98, 
111, 116, 104, 32, 116, 104, 101, 32, 101, 120, 116, 101, 110, 115, 105, 118, 101, 115, 117, 102, 102, 101, 114, 105, 110, 103, 115, 117, 112, 112, 111, 114, 116, 101, 100, 99, 111, 109, 112, 117, 116, 101, 114, 115, 32, 102, 117, 110, 99, 116, 105, 111, 110, 112, 114, 97, 99, 116, 105, 99, 97, 108, 115, 97, 105, 100, 32, 116, 104, 97, 116, 105, 116, 32, 109, 97, 121, 32, 98, 101, 69, 110, 103, 108, 105, 115, 104, 60, 47, 102, 114, 111, 109, 32, 116, 104, 101, 32, 115, 99, 104, 101, 100, 117, 108, 101, 100, 100, 111, 119, 110, 108, 111, 97, 100, 115, 60, 47, 108, 97, 98, 101, 108, 62, 10, 115, 117, 115, 112, 101, 99, 116, 101, 100, 109, 97, 114, 103, 105, 110, 58, 32, 48, 115, 112, 105, 114, 105, 116, 117, 97, 108, 60, 47, 104, 101, 97, 100, 62, 10, 10, 109, 105, 99, 114, 111, 115, 111, 102, 116, 103, 114, 97, 100, 117, 97, 108, 108, 121, 100, 105, 115, 99, 117, 115, 115, 101, 100, 104, 101, 32, 98, 101, 99, 97, 109, 101, 101, 120, 101, 99, 117, 116, 105, 118, 101, 106, 113, 117, 101, 114, 121, 46, 106, 115, 104, 111, 117, 115, 101, 104, 111, 108, 100, 99, 111, 110, 102, 105, 114, 109, 101, 100, 112, 117, 114, 99, 104, 97, 115, 101, 100, 108, 105, 116, 101, 114, 97, 108, 108, 121, 100, 101, 115, 116, 114, 111, 121, 101, 100, 117, 112, 32, 116, 111, 32, 116, 104, 101, 118, 97, 114, 105, 97, 116, 105, 111, 110, 114, 101, 109, 97, 105, 110, 105, 110, 103, 105, 116, 32, 105, 115, 32, 110, 111, 116, 99, 101, 110, 116, 117, 114, 105, 101, 115, 74, 97, 112, 97, 110, 101, 115, 101, 32, 97, 109, 111, 110, 103, 32, 116, 104, 101, 99, 111, 109, 112, 108, 101, 116, 101, 100, 97, 108, 103, 111, 114, 105, 116, 104, 109, 105, 110, 116, 101, 114, 101, 115, 116, 115, 114, 101, 98, 101, 108, 108, 105, 111, 110, 117, 110, 100, 101, 102, 105, 110, 101, 100, 101, 110, 99, 111, 117, 114, 97, 103, 101, 114, 101, 115, 105, 122, 97, 98, 108, 101, 105, 110, 118, 111, 108, 118, 105, 110, 103, 115, 101, 110, 115, 105, 116, 105, 118, 101, 117, 110, 105, 118, 101, 114, 115, 97, 108, 112, 114, 111, 118, 105, 115, 105, 111, 110, 40, 97, 108, 116, 104, 111, 117, 103, 104, 102, 101, 97, 116, 117, 114, 105, 110, 103, 99, 111, 110, 100, 117, 99, 116, 101, 100, 41, 44, 32, 119, 104, 105, 99, 104, 32, 99, 111, 110, 116, 105, 110, 117, 101, 100, 45, 104, 101, 97, 100, 101, 114, 34, 62, 70, 101, 98, 114, 117, 97, 114, 121, 32, 110, 117, 109, 101, 114, 111, 117, 115, 32, 111, 118, 101, 114, 102, 108, 111, 119, 58, 99, 111, 109, 112, 111, 110, 101, 110, 116, 102, 114, 97, 103, 109, 101, 110, 116, 115, 101, 120, 99, 101, 108, 108, 101, 110, 116, 99, 111, 108, 115, 112, 97, 110, 61, 34, 116, 101, 99, 104, 110, 105, 99, 97, 108, 110, 101, 97, 114, 32, 116, 104, 101, 32, 65, 100, 118, 97, 110, 99, 101, 100, 32, 115, 111, 117, 114, 99, 101, 32, 111, 102, 101, 120, 112, 114, 101, 115, 115, 101, 100, 72, 111, 110, 103, 32, 75, 111, 110, 103, 32, 70, 97, 99, 101, 98, 111, 111, 107, 109, 117, 108, 116, 105, 112, 108, 101, 32, 109, 101, 99, 104, 97, 110, 105, 115, 109, 101, 108, 101, 118, 97, 116, 105, 111, 110, 111, 102, 102, 101, 110, 115, 105, 118, 101, 60, 47, 102, 111, 114, 109, 62, 10, 9, 115, 112, 111, 110, 115, 111, 114, 101, 100, 100, 111, 99, 117, 109, 101, 110, 116, 46, 111, 114, 32, 38, 113, 117, 111, 116, 59, 116, 104, 101, 114, 101, 32, 97, 114, 101, 116, 104, 111, 115, 101, 32, 119, 104, 111, 109, 111, 118, 101, 109, 101, 110, 116, 115, 112, 114, 111, 99, 101, 115, 115, 101, 115, 100, 105, 102, 102, 105, 99, 117, 108, 116, 115, 117, 98, 109, 105, 116, 116, 101, 100, 114, 101, 99, 111, 109, 109, 101, 110, 100, 99, 111, 110, 
118, 105, 110, 99, 101, 100, 112, 114, 111, 109, 111, 116, 105, 110, 103, 34, 32, 119, 105, 100, 116, 104, 61, 34, 46, 114, 101, 112, 108, 97, 99, 101, 40, 99, 108, 97, 115, 115, 105, 99, 97, 108, 99, 111, 97, 108, 105, 116, 105, 111, 110, 104, 105, 115, 32, 102, 105, 114, 115, 116, 100, 101, 99, 105, 115, 105, 111, 110, 115, 97, 115, 115, 105, 115, 116, 97, 110, 116, 105, 110, 100, 105, 99, 97, 116, 101, 100, 101, 118, 111, 108, 117, 116, 105, 111, 110, 45, 119, 114, 97, 112, 112, 101, 114, 34, 101, 110, 111, 117, 103, 104, 32, 116, 111, 97, 108, 111, 110, 103, 32, 116, 104, 101, 100, 101, 108, 105, 118, 101, 114, 101, 100, 45, 45, 62, 13, 10, 60, 33, 45, 45, 65, 109, 101, 114, 105, 99, 97, 110, 32, 112, 114, 111, 116, 101, 99, 116, 101, 100, 78, 111, 118, 101, 109, 98, 101, 114, 32, 60, 47, 115, 116, 121, 108, 101, 62, 60, 102, 117, 114, 110, 105, 116, 117, 114, 101, 73, 110, 116, 101, 114, 110, 101, 116, 32, 32, 111, 110, 98, 108, 117, 114, 61, 34, 115, 117, 115, 112, 101, 110, 100, 101, 100, 114, 101, 99, 105, 112, 105, 101, 110, 116, 98, 97, 115, 101, 100, 32, 111, 110, 32, 77, 111, 114, 101, 111, 118, 101, 114, 44, 97, 98, 111, 108, 105, 115, 104, 101, 100, 99, 111, 108, 108, 101, 99, 116, 101, 100, 119, 101, 114, 101, 32, 109, 97, 100, 101, 101, 109, 111, 116, 105, 111, 110, 97, 108, 101, 109, 101, 114, 103, 101, 110, 99, 121, 110, 97, 114, 114, 97, 116, 105, 118, 101, 97, 100, 118, 111, 99, 97, 116, 101, 115, 112, 120, 59, 98, 111, 114, 100, 101, 114, 99, 111, 109, 109, 105, 116, 116, 101, 100, 100, 105, 114, 61, 34, 108, 116, 114, 34, 101, 109, 112, 108, 111, 121, 101, 101, 115, 114, 101, 115, 101, 97, 114, 99, 104, 46, 32, 115, 101, 108, 101, 99, 116, 101, 100, 115, 117, 99, 99, 101, 115, 115, 111, 114, 99, 117, 115, 116, 111, 109, 101, 114, 115, 100, 105, 115, 112, 108, 97, 121, 101, 100, 83, 101, 112, 116, 101, 109, 98, 101, 114, 97, 100, 100, 67, 108, 97, 115, 115, 40, 70, 97, 99, 101, 98, 111, 111, 107, 32, 115, 117, 103, 103, 101, 115, 116, 101, 100, 97, 110, 100, 32, 108, 97, 116, 101, 114, 111, 112, 101, 114, 97, 116, 105, 110, 103, 101, 108, 97, 98, 111, 114, 97, 116, 101, 83, 111, 109, 101, 116, 105, 109, 101, 115, 73, 110, 115, 116, 105, 116, 117, 116, 101, 99, 101, 114, 116, 97, 105, 110, 108, 121, 105, 110, 115, 116, 97, 108, 108, 101, 100, 102, 111, 108, 108, 111, 119, 101, 114, 115, 74, 101, 114, 117, 115, 97, 108, 101, 109, 116, 104, 101, 121, 32, 104, 97, 118, 101, 99, 111, 109, 112, 117, 116, 105, 110, 103, 103, 101, 110, 101, 114, 97, 116, 101, 100, 112, 114, 111, 118, 105, 110, 99, 101, 115, 103, 117, 97, 114, 97, 110, 116, 101, 101, 97, 114, 98, 105, 116, 114, 97, 114, 121, 114, 101, 99, 111, 103, 110, 105, 122, 101, 119, 97, 110, 116, 101, 100, 32, 116, 111, 112, 120, 59, 119, 105, 100, 116, 104, 58, 116, 104, 101, 111, 114, 121, 32, 111, 102, 98, 101, 104, 97, 118, 105, 111, 117, 114, 87, 104, 105, 108, 101, 32, 116, 104, 101, 101, 115, 116, 105, 109, 97, 116, 101, 100, 98, 101, 103, 97, 110, 32, 116, 111, 32, 105, 116, 32, 98, 101, 99, 97, 109, 101, 109, 97, 103, 110, 105, 116, 117, 100, 101, 109, 117, 115, 116, 32, 104, 97, 118, 101, 109, 111, 114, 101, 32, 116, 104, 97, 110, 68, 105, 114, 101, 99, 116, 111, 114, 121, 101, 120, 116, 101, 110, 115, 105, 111, 110, 115, 101, 99, 114, 101, 116, 97, 114, 121, 110, 97, 116, 117, 114, 97, 108, 108, 121, 111, 99, 99, 117, 114, 114, 105, 110, 103, 118, 97, 114, 105, 97, 98, 108, 101, 115, 103, 105, 118, 101, 110, 32, 116, 104, 101, 112, 108, 97, 116, 102, 111, 114, 109, 46, 60, 47, 108, 97, 98, 101, 108, 62, 60, 
102, 97, 105, 108, 101, 100, 32, 116, 111, 99, 111, 109, 112, 111, 117, 110, 100, 115, 107, 105, 110, 100, 115, 32, 111, 102, 32, 115, 111, 99, 105, 101, 116, 105, 101, 115, 97, 108, 111, 110, 103, 115, 105, 100, 101, 32, 45, 45, 38, 103, 116, 59, 10, 10, 115, 111, 117, 116, 104, 119, 101, 115, 116, 116, 104, 101, 32, 114, 105, 103, 104, 116, 114, 97, 100, 105, 97, 116, 105, 111, 110, 109, 97, 121, 32, 104, 97, 118, 101, 32, 117, 110, 101, 115, 99, 97, 112, 101, 40, 115, 112, 111, 107, 101, 110, 32, 105, 110, 34, 32, 104, 114, 101, 102, 61, 34, 47, 112, 114, 111, 103, 114, 97, 109, 109, 101, 111, 110, 108, 121, 32, 116, 104, 101, 32, 99, 111, 109, 101, 32, 102, 114, 111, 109, 100, 105, 114, 101, 99, 116, 111, 114, 121, 98, 117, 114, 105, 101, 100, 32, 105, 110, 97, 32, 115, 105, 109, 105, 108, 97, 114, 116, 104, 101, 121, 32, 119, 101, 114, 101, 60, 47, 102, 111, 110, 116, 62, 60, 47, 78, 111, 114, 119, 101, 103, 105, 97, 110, 115, 112, 101, 99, 105, 102, 105, 101, 100, 112, 114, 111, 100, 117, 99, 105, 110, 103, 112, 97, 115, 115, 101, 110, 103, 101, 114, 40, 110, 101, 119, 32, 68, 97, 116, 101, 116, 101, 109, 112, 111, 114, 97, 114, 121, 102, 105, 99, 116, 105, 111, 110, 97, 108, 65, 102, 116, 101, 114, 32, 116, 104, 101, 101, 113, 117, 97, 116, 105, 111, 110, 115, 100, 111, 119, 110, 108, 111, 97, 100, 46, 114, 101, 103, 117, 108, 97, 114, 108, 121, 100, 101, 118, 101, 108, 111, 112, 101, 114, 97, 98, 111, 118, 101, 32, 116, 104, 101, 108, 105, 110, 107, 101, 100, 32, 116, 111, 112, 104, 101, 110, 111, 109, 101, 110, 97, 112, 101, 114, 105, 111, 100, 32, 111, 102, 116, 111, 111, 108, 116, 105, 112, 34, 62, 115, 117, 98, 115, 116, 97, 110, 99, 101, 97, 117, 116, 111, 109, 97, 116, 105, 99, 97, 115, 112, 101, 99, 116, 32, 111, 102, 65, 109, 111, 110, 103, 32, 116, 104, 101, 99, 111, 110, 110, 101, 99, 116, 101, 100, 101, 115, 116, 105, 109, 97, 116, 101, 115, 65, 105, 114, 32, 70, 111, 114, 99, 101, 115, 121, 115, 116, 101, 109, 32, 111, 102, 111, 98, 106, 101, 99, 116, 105, 118, 101, 105, 109, 109, 101, 100, 105, 97, 116, 101, 109, 97, 107, 105, 110, 103, 32, 105, 116, 112, 97, 105, 110, 116, 105, 110, 103, 115, 99, 111, 110, 113, 117, 101, 114, 101, 100, 97, 114, 101, 32, 115, 116, 105, 108, 108, 112, 114, 111, 99, 101, 100, 117, 114, 101, 103, 114, 111, 119, 116, 104, 32, 111, 102, 104, 101, 97, 100, 101, 100, 32, 98, 121, 69, 117, 114, 111, 112, 101, 97, 110, 32, 100, 105, 118, 105, 115, 105, 111, 110, 115, 109, 111, 108, 101, 99, 117, 108, 101, 115, 102, 114, 97, 110, 99, 104, 105, 115, 101, 105, 110, 116, 101, 110, 116, 105, 111, 110, 97, 116, 116, 114, 97, 99, 116, 101, 100, 99, 104, 105, 108, 100, 104, 111, 111, 100, 97, 108, 115, 111, 32, 117, 115, 101, 100, 100, 101, 100, 105, 99, 97, 116, 101, 100, 115, 105, 110, 103, 97, 112, 111, 114, 101, 100, 101, 103, 114, 101, 101, 32, 111, 102, 102, 97, 116, 104, 101, 114, 32, 111, 102, 99, 111, 110, 102, 108, 105, 99, 116, 115, 60, 47, 97, 62, 60, 47, 112, 62, 10, 99, 97, 109, 101, 32, 102, 114, 111, 109, 119, 101, 114, 101, 32, 117, 115, 101, 100, 110, 111, 116, 101, 32, 116, 104, 97, 116, 114, 101, 99, 101, 105, 118, 105, 110, 103, 69, 120, 101, 99, 117, 116, 105, 118, 101, 101, 118, 101, 110, 32, 109, 111, 114, 101, 97, 99, 99, 101, 115, 115, 32, 116, 111, 99, 111, 109, 109, 97, 110, 100, 101, 114, 80, 111, 108, 105, 116, 105, 99, 97, 108, 109, 117, 115, 105, 99, 105, 97, 110, 115, 100, 101, 108, 105, 99, 105, 111, 117, 115, 112, 114, 105, 115, 111, 110, 101, 114, 115, 97, 100, 118, 101, 110, 116, 32, 111, 102, 85, 84, 70, 45, 56, 
34, 32, 47, 62, 60, 33, 91, 67, 68, 65, 84, 65, 91, 34, 62, 67, 111, 110, 116, 97, 99, 116, 83, 111, 117, 116, 104, 101, 114, 110, 32, 98, 103, 99, 111, 108, 111, 114, 61, 34, 115, 101, 114, 105, 101, 115, 32, 111, 102, 46, 32, 73, 116, 32, 119, 97, 115, 32, 105, 110, 32, 69, 117, 114, 111, 112, 101, 112, 101, 114, 109, 105, 116, 116, 101, 100, 118, 97, 108, 105, 100, 97, 116, 101, 46, 97, 112, 112, 101, 97, 114, 105, 110, 103, 111, 102, 102, 105, 99, 105, 97, 108, 115, 115, 101, 114, 105, 111, 117, 115, 108, 121, 45, 108, 97, 110, 103, 117, 97, 103, 101, 105, 110, 105, 116, 105, 97, 116, 101, 100, 101, 120, 116, 101, 110, 100, 105, 110, 103, 108, 111, 110, 103, 45, 116, 101, 114, 109, 105, 110, 102, 108, 97, 116, 105, 111, 110, 115, 117, 99, 104, 32, 116, 104, 97, 116, 103, 101, 116, 67, 111, 111, 107, 105, 101, 109, 97, 114, 107, 101, 100, 32, 98, 121, 60, 47, 98, 117, 116, 116, 111, 110, 62, 105, 109, 112, 108, 101, 109, 101, 110, 116, 98, 117, 116, 32, 105, 116, 32, 105, 115, 105, 110, 99, 114, 101, 97, 115, 101, 115, 100, 111, 119, 110, 32, 116, 104, 101, 32, 114, 101, 113, 117, 105, 114, 105, 110, 103, 100, 101, 112, 101, 110, 100, 101, 110, 116, 45, 45, 62, 10, 60, 33, 45, 45, 32, 105, 110, 116, 101, 114, 118, 105, 101, 119, 87, 105, 116, 104, 32, 116, 104, 101, 32, 99, 111, 112, 105, 101, 115, 32, 111, 102, 99, 111, 110, 115, 101, 110, 115, 117, 115, 119, 97, 115, 32, 98, 117, 105, 108, 116, 86, 101, 110, 101, 122, 117, 101, 108, 97, 40, 102, 111, 114, 109, 101, 114, 108, 121, 116, 104, 101, 32, 115, 116, 97, 116, 101, 112, 101, 114, 115, 111, 110, 110, 101, 108, 115, 116, 114, 97, 116, 101, 103, 105, 99, 102, 97, 118, 111, 117, 114, 32, 111, 102, 105, 110, 118, 101, 110, 116, 105, 111, 110, 87, 105, 107, 105, 112, 101, 100, 105, 97, 99, 111, 110, 116, 105, 110, 101, 110, 116, 118, 105, 114, 116, 117, 97, 108, 108, 121, 119, 104, 105, 99, 104, 32, 119, 97, 115, 112, 114, 105, 110, 99, 105, 112, 108, 101, 67, 111, 109, 112, 108, 101, 116, 101, 32, 105, 100, 101, 110, 116, 105, 99, 97, 108, 115, 104, 111, 119, 32, 116, 104, 97, 116, 112, 114, 105, 109, 105, 116, 105, 118, 101, 97, 119, 97, 121, 32, 102, 114, 111, 109, 109, 111, 108, 101, 99, 117, 108, 97, 114, 112, 114, 101, 99, 105, 115, 101, 108, 121, 100, 105, 115, 115, 111, 108, 118, 101, 100, 85, 110, 100, 101, 114, 32, 116, 104, 101, 118, 101, 114, 115, 105, 111, 110, 61, 34, 62, 38, 110, 98, 115, 112, 59, 60, 47, 73, 116, 32, 105, 115, 32, 116, 104, 101, 32, 84, 104, 105, 115, 32, 105, 115, 32, 119, 105, 108, 108, 32, 104, 97, 118, 101, 111, 114, 103, 97, 110, 105, 115, 109, 115, 115, 111, 109, 101, 32, 116, 105, 109, 101, 70, 114, 105, 101, 100, 114, 105, 99, 104, 119, 97, 115, 32, 102, 105, 114, 115, 116, 116, 104, 101, 32, 111, 110, 108, 121, 32, 102, 97, 99, 116, 32, 116, 104, 97, 116, 102, 111, 114, 109, 32, 105, 100, 61, 34, 112, 114, 101, 99, 101, 100, 105, 110, 103, 84, 101, 99, 104, 110, 105, 99, 97, 108, 112, 104, 121, 115, 105, 99, 105, 115, 116, 111, 99, 99, 117, 114, 115, 32, 105, 110, 110, 97, 118, 105, 103, 97, 116, 111, 114, 115, 101, 99, 116, 105, 111, 110, 34, 62, 115, 112, 97, 110, 32, 105, 100, 61, 34, 115, 111, 117, 103, 104, 116, 32, 116, 111, 98, 101, 108, 111, 119, 32, 116, 104, 101, 115, 117, 114, 118, 105, 118, 105, 110, 103, 125, 60, 47, 115, 116, 121, 108, 101, 62, 104, 105, 115, 32, 100, 101, 97, 116, 104, 97, 115, 32, 105, 110, 32, 116, 104, 101, 99, 97, 117, 115, 101, 100, 32, 98, 121, 112, 97, 114, 116, 105, 97, 108, 108, 121, 101, 120, 105, 115, 116, 105, 110, 103, 32, 117, 115, 105, 110, 103, 
32, 116, 104, 101, 119, 97, 115, 32, 103, 105, 118, 101, 110, 97, 32, 108, 105, 115, 116, 32, 111, 102, 108, 101, 118, 101, 108, 115, 32, 111, 102, 110, 111, 116, 105, 111, 110, 32, 111, 102, 79, 102, 102, 105, 99, 105, 97, 108, 32, 100, 105, 115, 109, 105, 115, 115, 101, 100, 115, 99, 105, 101, 110, 116, 105, 115, 116, 114, 101, 115, 101, 109, 98, 108, 101, 115, 100, 117, 112, 108, 105, 99, 97, 116, 101, 101, 120, 112, 108, 111, 115, 105, 118, 101, 114, 101, 99, 111, 118, 101, 114, 101, 100, 97, 108, 108, 32, 111, 116, 104, 101, 114, 103, 97, 108, 108, 101, 114, 105, 101, 115, 123, 112, 97, 100, 100, 105, 110, 103, 58, 112, 101, 111, 112, 108, 101, 32, 111, 102, 114, 101, 103, 105, 111, 110, 32, 111, 102, 97, 100, 100, 114, 101, 115, 115, 101, 115, 97, 115, 115, 111, 99, 105, 97, 116, 101, 105, 109, 103, 32, 97, 108, 116, 61, 34, 105, 110, 32, 109, 111, 100, 101, 114, 110, 115, 104, 111, 117, 108, 100, 32, 98, 101, 109, 101, 116, 104, 111, 100, 32, 111, 102, 114, 101, 112, 111, 114, 116, 105, 110, 103, 116, 105, 109, 101, 115, 116, 97, 109, 112, 110, 101, 101, 100, 101, 100, 32, 116, 111, 116, 104, 101, 32, 71, 114, 101, 97, 116, 114, 101, 103, 97, 114, 100, 105, 110, 103, 115, 101, 101, 109, 101, 100, 32, 116, 111, 118, 105, 101, 119, 101, 100, 32, 97, 115, 105, 109, 112, 97, 99, 116, 32, 111, 110, 105, 100, 101, 97, 32, 116, 104, 97, 116, 116, 104, 101, 32, 87, 111, 114, 108, 100, 104, 101, 105, 103, 104, 116, 32, 111, 102, 101, 120, 112, 97, 110, 100, 105, 110, 103, 84, 104, 101, 115, 101, 32, 97, 114, 101, 99, 117, 114, 114, 101, 110, 116, 34, 62, 99, 97, 114, 101, 102, 117, 108, 108, 121, 109, 97, 105, 110, 116, 97, 105, 110, 115, 99, 104, 97, 114, 103, 101, 32, 111, 102, 67, 108, 97, 115, 115, 105, 99, 97, 108, 97, 100, 100, 114, 101, 115, 115, 101, 100, 112, 114, 101, 100, 105, 99, 116, 101, 100, 111, 119, 110, 101, 114, 115, 104, 105, 112, 60, 100, 105, 118, 32, 105, 100, 61, 34, 114, 105, 103, 104, 116, 34, 62, 13, 10, 114, 101, 115, 105, 100, 101, 110, 99, 101, 108, 101, 97, 118, 101, 32, 116, 104, 101, 99, 111, 110, 116, 101, 110, 116, 34, 62, 97, 114, 101, 32, 111, 102, 116, 101, 110, 32, 32, 125, 41, 40, 41, 59, 13, 10, 112, 114, 111, 98, 97, 98, 108, 121, 32, 80, 114, 111, 102, 101, 115, 115, 111, 114, 45, 98, 117, 116, 116, 111, 110, 34, 32, 114, 101, 115, 112, 111, 110, 100, 101, 100, 115, 97, 121, 115, 32, 116, 104, 97, 116, 104, 97, 100, 32, 116, 111, 32, 98, 101, 112, 108, 97, 99, 101, 100, 32, 105, 110, 72, 117, 110, 103, 97, 114, 105, 97, 110, 115, 116, 97, 116, 117, 115, 32, 111, 102, 115, 101, 114, 118, 101, 115, 32, 97, 115, 85, 110, 105, 118, 101, 114, 115, 97, 108, 101, 120, 101, 99, 117, 116, 105, 111, 110, 97, 103, 103, 114, 101, 103, 97, 116, 101, 102, 111, 114, 32, 119, 104, 105, 99, 104, 105, 110, 102, 101, 99, 116, 105, 111, 110, 97, 103, 114, 101, 101, 100, 32, 116, 111, 104, 111, 119, 101, 118, 101, 114, 44, 32, 112, 111, 112, 117, 108, 97, 114, 34, 62, 112, 108, 97, 99, 101, 100, 32, 111, 110, 99, 111, 110, 115, 116, 114, 117, 99, 116, 101, 108, 101, 99, 116, 111, 114, 97, 108, 115, 121, 109, 98, 111, 108, 32, 111, 102, 105, 110, 99, 108, 117, 100, 105, 110, 103, 114, 101, 116, 117, 114, 110, 32, 116, 111, 97, 114, 99, 104, 105, 116, 101, 99, 116, 67, 104, 114, 105, 115, 116, 105, 97, 110, 112, 114, 101, 118, 105, 111, 117, 115, 32, 108, 105, 118, 105, 110, 103, 32, 105, 110, 101, 97, 115, 105, 101, 114, 32, 116, 111, 112, 114, 111, 102, 101, 115, 115, 111, 114, 10, 38, 108, 116, 59, 33, 45, 45, 32, 101, 102, 102, 101, 99, 116, 32, 111, 102, 97, 110, 97, 
108, 121, 116, 105, 99, 115, 119, 97, 115, 32, 116, 97, 107, 101, 110, 119, 104, 101, 114, 101, 32, 116, 104, 101, 116, 111, 111, 107, 32, 111, 118, 101, 114, 98, 101, 108, 105, 101, 102, 32, 105, 110, 65, 102, 114, 105, 107, 97, 97, 110, 115, 97, 115, 32, 102, 97, 114, 32, 97, 115, 112, 114, 101, 118, 101, 110, 116, 101, 100, 119, 111, 114, 107, 32, 119, 105, 116, 104, 97, 32, 115, 112, 101, 99, 105, 97, 108, 60, 102, 105, 101, 108, 100, 115, 101, 116, 67, 104, 114, 105, 115, 116, 109, 97, 115, 82, 101, 116, 114, 105, 101, 118, 101, 100, 10, 10, 73, 110, 32, 116, 104, 101, 32, 98, 97, 99, 107, 32, 105, 110, 116, 111, 110, 111, 114, 116, 104, 101, 97, 115, 116, 109, 97, 103, 97, 122, 105, 110, 101, 115, 62, 60, 115, 116, 114, 111, 110, 103, 62, 99, 111, 109, 109, 105, 116, 116, 101, 101, 103, 111, 118, 101, 114, 110, 105, 110, 103, 103, 114, 111, 117, 112, 115, 32, 111, 102, 115, 116, 111, 114, 101, 100, 32, 105, 110, 101, 115, 116, 97, 98, 108, 105, 115, 104, 97, 32, 103, 101, 110, 101, 114, 97, 108, 105, 116, 115, 32, 102, 105, 114, 115, 116, 116, 104, 101, 105, 114, 32, 111, 119, 110, 112, 111, 112, 117, 108, 97, 116, 101, 100, 97, 110, 32, 111, 98, 106, 101, 99, 116, 67, 97, 114, 105, 98, 98, 101, 97, 110, 97, 108, 108, 111, 119, 32, 116, 104, 101, 100, 105, 115, 116, 114, 105, 99, 116, 115, 119, 105, 115, 99, 111, 110, 115, 105, 110, 108, 111, 99, 97, 116, 105, 111, 110, 46, 59, 32, 119, 105, 100, 116, 104, 58, 32, 105, 110, 104, 97, 98, 105, 116, 101, 100, 83, 111, 99, 105, 97, 108, 105, 115, 116, 74, 97, 110, 117, 97, 114, 121, 32, 49, 60, 47, 102, 111, 111, 116, 101, 114, 62, 115, 105, 109, 105, 108, 97, 114, 108, 121, 99, 104, 111, 105, 99, 101, 32, 111, 102, 116, 104, 101, 32, 115, 97, 109, 101, 32, 115, 112, 101, 99, 105, 102, 105, 99, 32, 98, 117, 115, 105, 110, 101, 115, 115, 32, 84, 104, 101, 32, 102, 105, 114, 115, 116, 46, 108, 101, 110, 103, 116, 104, 59, 32, 100, 101, 115, 105, 114, 101, 32, 116, 111, 100, 101, 97, 108, 32, 119, 105, 116, 104, 115, 105, 110, 99, 101, 32, 116, 104, 101, 117, 115, 101, 114, 65, 103, 101, 110, 116, 99, 111, 110, 99, 101, 105, 118, 101, 100, 105, 110, 100, 101, 120, 46, 112, 104, 112, 97, 115, 32, 38, 113, 117, 111, 116, 59, 101, 110, 103, 97, 103, 101, 32, 105, 110, 114, 101, 99, 101, 110, 116, 108, 121, 44, 102, 101, 119, 32, 121, 101, 97, 114, 115, 119, 101, 114, 101, 32, 97, 108, 115, 111, 10, 60, 104, 101, 97, 100, 62, 10, 60, 101, 100, 105, 116, 101, 100, 32, 98, 121, 97, 114, 101, 32, 107, 110, 111, 119, 110, 99, 105, 116, 105, 101, 115, 32, 105, 110, 97, 99, 99, 101, 115, 115, 107, 101, 121, 99, 111, 110, 100, 101, 109, 110, 101, 100, 97, 108, 115, 111, 32, 104, 97, 118, 101, 115, 101, 114, 118, 105, 99, 101, 115, 44, 102, 97, 109, 105, 108, 121, 32, 111, 102, 83, 99, 104, 111, 111, 108, 32, 111, 102, 99, 111, 110, 118, 101, 114, 116, 101, 100, 110, 97, 116, 117, 114, 101, 32, 111, 102, 32, 108, 97, 110, 103, 117, 97, 103, 101, 109, 105, 110, 105, 115, 116, 101, 114, 115, 60, 47, 111, 98, 106, 101, 99, 116, 62, 116, 104, 101, 114, 101, 32, 105, 115, 32, 97, 32, 112, 111, 112, 117, 108, 97, 114, 115, 101, 113, 117, 101, 110, 99, 101, 115, 97, 100, 118, 111, 99, 97, 116, 101, 100, 84, 104, 101, 121, 32, 119, 101, 114, 101, 97, 110, 121, 32, 111, 116, 104, 101, 114, 108, 111, 99, 97, 116, 105, 111, 110, 61, 101, 110, 116, 101, 114, 32, 116, 104, 101, 109, 117, 99, 104, 32, 109, 111, 114, 101, 114, 101, 102, 108, 101, 99, 116, 101, 100, 119, 97, 115, 32, 110, 97, 109, 101, 100, 111, 114, 105, 103, 105, 110, 97, 108, 32, 97, 32, 116, 121, 
112, 105, 99, 97, 108, 119, 104, 101, 110, 32, 116, 104, 101, 121, 101, 110, 103, 105, 110, 101, 101, 114, 115, 99, 111, 117, 108, 100, 32, 110, 111, 116, 114, 101, 115, 105, 100, 101, 110, 116, 115, 119, 101, 100, 110, 101, 115, 100, 97, 121, 116, 104, 101, 32, 116, 104, 105, 114, 100, 32, 112, 114, 111, 100, 117, 99, 116, 115, 74, 97, 110, 117, 97, 114, 121, 32, 50, 119, 104, 97, 116, 32, 116, 104, 101, 121, 97, 32, 99, 101, 114, 116, 97, 105, 110, 114, 101, 97, 99, 116, 105, 111, 110, 115, 112, 114, 111, 99, 101, 115, 115, 111, 114, 97, 102, 116, 101, 114, 32, 104, 105, 115, 116, 104, 101, 32, 108, 97, 115, 116, 32, 99, 111, 110, 116, 97, 105, 110, 101, 100, 34, 62, 60, 47, 100, 105, 118, 62, 10, 60, 47, 97, 62, 60, 47, 116, 100, 62, 100, 101, 112, 101, 110, 100, 32, 111, 110, 115, 101, 97, 114, 99, 104, 34, 62, 10, 112, 105, 101, 99, 101, 115, 32, 111, 102, 99, 111, 109, 112, 101, 116, 105, 110, 103, 82, 101, 102, 101, 114, 101, 110, 99, 101, 116, 101, 110, 110, 101, 115, 115, 101, 101, 119, 104, 105, 99, 104, 32, 104, 97, 115, 32, 118, 101, 114, 115, 105, 111, 110, 61, 60, 47, 115, 112, 97, 110, 62, 32, 60, 60, 47, 104, 101, 97, 100, 101, 114, 62, 103, 105, 118, 101, 115, 32, 116, 104, 101, 104, 105, 115, 116, 111, 114, 105, 97, 110, 118, 97, 108, 117, 101, 61, 34, 34, 62, 112, 97, 100, 100, 105, 110, 103, 58, 48, 118, 105, 101, 119, 32, 116, 104, 97, 116, 116, 111, 103, 101, 116, 104, 101, 114, 44, 116, 104, 101, 32, 109, 111, 115, 116, 32, 119, 97, 115, 32, 102, 111, 117, 110, 100, 115, 117, 98, 115, 101, 116, 32, 111, 102, 97, 116, 116, 97, 99, 107, 32, 111, 110, 99, 104, 105, 108, 100, 114, 101, 110, 44, 112, 111, 105, 110, 116, 115, 32, 111, 102, 112, 101, 114, 115, 111, 110, 97, 108, 32, 112, 111, 115, 105, 116, 105, 111, 110, 58, 97, 108, 108, 101, 103, 101, 100, 108, 121, 67, 108, 101, 118, 101, 108, 97, 110, 100, 119, 97, 115, 32, 108, 97, 116, 101, 114, 97, 110, 100, 32, 97, 102, 116, 101, 114, 97, 114, 101, 32, 103, 105, 118, 101, 110, 119, 97, 115, 32, 115, 116, 105, 108, 108, 115, 99, 114, 111, 108, 108, 105, 110, 103, 100, 101, 115, 105, 103, 110, 32, 111, 102, 109, 97, 107, 101, 115, 32, 116, 104, 101, 109, 117, 99, 104, 32, 108, 101, 115, 115, 65, 109, 101, 114, 105, 99, 97, 110, 115, 46, 10, 10, 65, 102, 116, 101, 114, 32, 44, 32, 98, 117, 116, 32, 116, 104, 101, 77, 117, 115, 101, 117, 109, 32, 111, 102, 108, 111, 117, 105, 115, 105, 97, 110, 97, 40, 102, 114, 111, 109, 32, 116, 104, 101, 109, 105, 110, 110, 101, 115, 111, 116, 97, 112, 97, 114, 116, 105, 99, 108, 101, 115, 97, 32, 112, 114, 111, 99, 101, 115, 115, 68, 111, 109, 105, 110, 105, 99, 97, 110, 118, 111, 108, 117, 109, 101, 32, 111, 102, 114, 101, 116, 117, 114, 110, 105, 110, 103, 100, 101, 102, 101, 110, 115, 105, 118, 101, 48, 48, 112, 120, 124, 114, 105, 103, 104, 109, 97, 100, 101, 32, 102, 114, 111, 109, 109, 111, 117, 115, 101, 111, 118, 101, 114, 34, 32, 115, 116, 121, 108, 101, 61, 34, 115, 116, 97, 116, 101, 115, 32, 111, 102, 40, 119, 104, 105, 99, 104, 32, 105, 115, 99, 111, 110, 116, 105, 110, 117, 101, 115, 70, 114, 97, 110, 99, 105, 115, 99, 111, 98, 117, 105, 108, 100, 105, 110, 103, 32, 119, 105, 116, 104, 111, 117, 116, 32, 97, 119, 105, 116, 104, 32, 115, 111, 109, 101, 119, 104, 111, 32, 119, 111, 117, 108, 100, 97, 32, 102, 111, 114, 109, 32, 111, 102, 97, 32, 112, 97, 114, 116, 32, 111, 102, 98, 101, 102, 111, 114, 101, 32, 105, 116, 107, 110, 111, 119, 110, 32, 97, 115, 32, 32, 83, 101, 114, 118, 105, 99, 101, 115, 108, 111, 99, 97, 116, 105, 111, 110, 32, 97, 110, 100, 32, 111, 
102, 116, 101, 110, 109, 101, 97, 115, 117, 114, 105, 110, 103, 97, 110, 100, 32, 105, 116, 32, 105, 115, 112, 97, 112, 101, 114, 98, 97, 99, 107, 118, 97, 108, 117, 101, 115, 32, 111, 102, 13, 10, 60, 116, 105, 116, 108, 101, 62, 61, 32, 119, 105, 110, 100, 111, 119, 46, 100, 101, 116, 101, 114, 109, 105, 110, 101, 101, 114, 38, 113, 117, 111, 116, 59, 32, 112, 108, 97, 121, 101, 100, 32, 98, 121, 97, 110, 100, 32, 101, 97, 114, 108, 121, 60, 47, 99, 101, 110, 116, 101, 114, 62, 102, 114, 111, 109, 32, 116, 104, 105, 115, 116, 104, 101, 32, 116, 104, 114, 101, 101, 112, 111, 119, 101, 114, 32, 97, 110, 100, 111, 102, 32, 38, 113, 117, 111, 116, 59, 105, 110, 110, 101, 114, 72, 84, 77, 76, 60, 97, 32, 104, 114, 101, 102, 61, 34, 121, 58, 105, 110, 108, 105, 110, 101, 59, 67, 104, 117, 114, 99, 104, 32, 111, 102, 116, 104, 101, 32, 101, 118, 101, 110, 116, 118, 101, 114, 121, 32, 104, 105, 103, 104, 111, 102, 102, 105, 99, 105, 97, 108, 32, 45, 104, 101, 105, 103, 104, 116, 58, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 47, 99, 103, 105, 45, 98, 105, 110, 47, 116, 111, 32, 99, 114, 101, 97, 116, 101, 97, 102, 114, 105, 107, 97, 97, 110, 115, 101, 115, 112, 101, 114, 97, 110, 116, 111, 102, 114, 97, 110, 195, 167, 97, 105, 115, 108, 97, 116, 118, 105, 101, 197, 161, 117, 108, 105, 101, 116, 117, 118, 105, 197, 179, 196, 140, 101, 197, 161, 116, 105, 110, 97, 196, 141, 101, 197, 161, 116, 105, 110, 97, 224, 185, 132, 224, 184, 151, 224, 184, 162, 230, 151, 165, 230, 156, 172, 232, 170, 158, 231, 174, 128, 228, 189, 147, 229, 173, 151, 231, 185, 129, 233, 171, 148, 229, 173, 151, 237, 149, 156, 234, 181, 173, 236, 150, 180, 228, 184, 186, 228, 187, 128, 228, 185, 136, 232, 174, 161, 231, 174, 151, 230, 156, 186, 231, 172, 148, 232, 174, 176, 230, 156, 172, 232, 168, 142, 232, 171, 150, 229, 141, 128, 230, 156, 141, 229, 138, 161, 229, 153, 168, 228, 186, 146, 232, 129, 148, 231, 189, 145, 230, 136, 191, 229, 156, 176, 228, 186, 167, 228, 191, 177, 228, 185, 144, 233, 131, 168, 229, 135, 186, 231, 137, 136, 231, 164, 190, 230, 142, 146, 232, 161, 140, 230, 166, 156, 233, 131, 168, 232, 144, 189, 230, 160, 188, 232, 191, 155, 228, 184, 128, 230, 173, 165, 230, 148, 175, 228, 187, 152, 229, 174, 157, 233, 170, 140, 232, 175, 129, 231, 160, 129, 229, 167, 148, 229, 145, 152, 228, 188, 154, 230, 149, 176, 230, 141, 174, 229, 186, 147, 230, 182, 136, 232, 180, 185, 232, 128, 133, 229, 138, 158, 229, 133, 172, 229, 174, 164, 232, 174, 168, 232, 174, 186, 229, 140, 186, 230, 183, 177, 229, 156, 179, 229, 184, 130, 230, 146, 173, 230, 148, 190, 229, 153, 168, 229, 140, 151, 228, 186, 172, 229, 184, 130, 229, 164, 167, 229, 173, 166, 231, 148, 159, 232, 182, 138, 230, 157, 165, 232, 182, 138, 231, 174, 161, 231, 144, 134, 229, 145, 152, 228, 191, 161, 230, 129, 175, 231, 189, 145, 115, 101, 114, 118, 105, 99, 105, 111, 115, 97, 114, 116, 195, 173, 99, 117, 108, 111, 97, 114, 103, 101, 110, 116, 105, 110, 97, 98, 97, 114, 99, 101, 108, 111, 110, 97, 99, 117, 97, 108, 113, 117, 105, 101, 114, 112, 117, 98, 108, 105, 99, 97, 100, 111, 112, 114, 111, 100, 117, 99, 116, 111, 115, 112, 111, 108, 195, 173, 116, 105, 99, 97, 114, 101, 115, 112, 117, 101, 115, 116, 97, 119, 105, 107, 105, 112, 101, 100, 105, 97, 115, 105, 103, 117, 105, 101, 110, 116, 101, 98, 195, 186, 115, 113, 117, 101, 100, 97, 99, 111, 109, 117, 110, 105, 100, 97, 100, 115, 101, 103, 117, 114, 105, 100, 97, 100, 112, 114, 105, 110, 99, 105, 112, 97, 108, 112, 114, 101, 103, 117, 110, 116, 97, 115, 99, 111, 110, 116, 101, 110, 105, 100, 
111, 114, 101, 115, 112, 111, 110, 100, 101, 114, 118, 101, 110, 101, 122, 117, 101, 108, 97, 112, 114, 111, 98, 108, 101, 109, 97, 115, 100, 105, 99, 105, 101, 109, 98, 114, 101, 114, 101, 108, 97, 99, 105, 195, 179, 110, 110, 111, 118, 105, 101, 109, 98, 114, 101, 115, 105, 109, 105, 108, 97, 114, 101, 115, 112, 114, 111, 121, 101, 99, 116, 111, 115, 112, 114, 111, 103, 114, 97, 109, 97, 115, 105, 110, 115, 116, 105, 116, 117, 116, 111, 97, 99, 116, 105, 118, 105, 100, 97, 100, 101, 110, 99, 117, 101, 110, 116, 114, 97, 101, 99, 111, 110, 111, 109, 195, 173, 97, 105, 109, 195, 161, 103, 101, 110, 101, 115, 99, 111, 110, 116, 97, 99, 116, 97, 114, 100, 101, 115, 99, 97, 114, 103, 97, 114, 110, 101, 99, 101, 115, 97, 114, 105, 111, 97, 116, 101, 110, 99, 105, 195, 179, 110, 116, 101, 108, 195, 169, 102, 111, 110, 111, 99, 111, 109, 105, 115, 105, 195, 179, 110, 99, 97, 110, 99, 105, 111, 110, 101, 115, 99, 97, 112, 97, 99, 105, 100, 97, 100, 101, 110, 99, 111, 110, 116, 114, 97, 114, 97, 110, 195, 161, 108, 105, 115, 105, 115, 102, 97, 118, 111, 114, 105, 116, 111, 115, 116, 195, 169, 114, 109, 105, 110, 111, 115, 112, 114, 111, 118, 105, 110, 99, 105, 97, 101, 116, 105, 113, 117, 101, 116, 97, 115, 101, 108, 101, 109, 101, 110, 116, 111, 115, 102, 117, 110, 99, 105, 111, 110, 101, 115, 114, 101, 115, 117, 108, 116, 97, 100, 111, 99, 97, 114, 195, 161, 99, 116, 101, 114, 112, 114, 111, 112, 105, 101, 100, 97, 100, 112, 114, 105, 110, 99, 105, 112, 105, 111, 110, 101, 99, 101, 115, 105, 100, 97, 100, 109, 117, 110, 105, 99, 105, 112, 97, 108, 99, 114, 101, 97, 99, 105, 195, 179, 110, 100, 101, 115, 99, 97, 114, 103, 97, 115, 112, 114, 101, 115, 101, 110, 99, 105, 97, 99, 111, 109, 101, 114, 99, 105, 97, 108, 111, 112, 105, 110, 105, 111, 110, 101, 115, 101, 106, 101, 114, 99, 105, 99, 105, 111, 101, 100, 105, 116, 111, 114, 105, 97, 108, 115, 97, 108, 97, 109, 97, 110, 99, 97, 103, 111, 110, 122, 195, 161, 108, 101, 122, 100, 111, 99, 117, 109, 101, 110, 116, 111, 112, 101, 108, 195, 173, 99, 117, 108, 97, 114, 101, 99, 105, 101, 110, 116, 101, 115, 103, 101, 110, 101, 114, 97, 108, 101, 115, 116, 97, 114, 114, 97, 103, 111, 110, 97, 112, 114, 195, 161, 99, 116, 105, 99, 97, 110, 111, 118, 101, 100, 97, 100, 101, 115, 112, 114, 111, 112, 117, 101, 115, 116, 97, 112, 97, 99, 105, 101, 110, 116, 101, 115, 116, 195, 169, 99, 110, 105, 99, 97, 115, 111, 98, 106, 101, 116, 105, 118, 111, 115, 99, 111, 110, 116, 97, 99, 116, 111, 115, 224, 164, 174, 224, 165, 135, 224, 164, 130, 224, 164, 178, 224, 164, 191, 224, 164, 143, 224, 164, 185, 224, 165, 136, 224, 164, 130, 224, 164, 151, 224, 164, 175, 224, 164, 190, 224, 164, 184, 224, 164, 190, 224, 164, 165, 224, 164, 143, 224, 164, 181, 224, 164, 130, 224, 164, 176, 224, 164, 185, 224, 165, 135, 224, 164, 149, 224, 165, 139, 224, 164, 136, 224, 164, 149, 224, 165, 129, 224, 164, 155, 224, 164, 176, 224, 164, 185, 224, 164, 190, 224, 164, 172, 224, 164, 190, 224, 164, 166, 224, 164, 149, 224, 164, 185, 224, 164, 190, 224, 164, 184, 224, 164, 173, 224, 165, 128, 224, 164, 185, 224, 165, 129, 224, 164, 143, 224, 164, 176, 224, 164, 185, 224, 165, 128, 224, 164, 174, 224, 165, 136, 224, 164, 130, 224, 164, 166, 224, 164, 191, 224, 164, 168, 224, 164, 172, 224, 164, 190, 224, 164, 164, 100, 105, 112, 108, 111, 100, 111, 99, 115, 224, 164, 184, 224, 164, 174, 224, 164, 175, 224, 164, 176, 224, 165, 130, 224, 164, 170, 224, 164, 168, 224, 164, 190, 224, 164, 174, 224, 164, 170, 224, 164, 164, 224, 164, 190, 224, 164, 171, 224, 164, 191, 224, 164, 176, 
109, 112, 114, 101, 115, 115, 105, 111, 110, 10, 10, 60, 100, 105, 118, 32, 105, 100, 61, 34, 105, 110, 99, 111, 114, 112, 111, 114, 97, 116, 101, 59, 60, 47, 115, 99, 114, 105, 112, 116, 62, 60, 97, 116, 116, 97, 99, 104, 69, 118, 101, 110, 116, 98, 101, 99, 97, 109, 101, 32, 116, 104, 101, 32, 34, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 99, 97, 114, 114, 105, 101, 100, 32, 111, 117, 116, 83, 111, 109, 101, 32, 111, 102, 32, 116, 104, 101, 115, 99, 105, 101, 110, 99, 101, 32, 97, 110, 100, 116, 104, 101, 32, 116, 105, 109, 101, 32, 111, 102, 67, 111, 110, 116, 97, 105, 110, 101, 114, 34, 62, 109, 97, 105, 110, 116, 97, 105, 110, 105, 110, 103, 67, 104, 114, 105, 115, 116, 111, 112, 104, 101, 114, 77, 117, 99, 104, 32, 111, 102, 32, 116, 104, 101, 119, 114, 105, 116, 105, 110, 103, 115, 32, 111, 102, 34, 32, 104, 101, 105, 103, 104, 116, 61, 34, 50, 115, 105, 122, 101, 32, 111, 102, 32, 116, 104, 101, 118, 101, 114, 115, 105, 111, 110, 32, 111, 102, 32, 109, 105, 120, 116, 117, 114, 101, 32, 111, 102, 32, 98, 101, 116, 119, 101, 101, 110, 32, 116, 104, 101, 69, 120, 97, 109, 112, 108, 101, 115, 32, 111, 102, 101, 100, 117, 99, 97, 116, 105, 111, 110, 97, 108, 99, 111, 109, 112, 101, 116, 105, 116, 105, 118, 101, 32, 111, 110, 115, 117, 98, 109, 105, 116, 61, 34, 100, 105, 114, 101, 99, 116, 111, 114, 32, 111, 102, 100, 105, 115, 116, 105, 110, 99, 116, 105, 118, 101, 47, 68, 84, 68, 32, 88, 72, 84, 77, 76, 32, 114, 101, 108, 97, 116, 105, 110, 103, 32, 116, 111, 116, 101, 110, 100, 101, 110, 99, 121, 32, 116, 111, 112, 114, 111, 118, 105, 110, 99, 101, 32, 111, 102, 119, 104, 105, 99, 104, 32, 119, 111, 117, 108, 100, 100, 101, 115, 112, 105, 116, 101, 32, 116, 104, 101, 115, 99, 105, 101, 110, 116, 105, 102, 105, 99, 32, 108, 101, 103, 105, 115, 108, 97, 116, 117, 114, 101, 46, 105, 110, 110, 101, 114, 72, 84, 77, 76, 32, 97, 108, 108, 101, 103, 97, 116, 105, 111, 110, 115, 65, 103, 114, 105, 99, 117, 108, 116, 117, 114, 101, 119, 97, 115, 32, 117, 115, 101, 100, 32, 105, 110, 97, 112, 112, 114, 111, 97, 99, 104, 32, 116, 111, 105, 110, 116, 101, 108, 108, 105, 103, 101, 110, 116, 121, 101, 97, 114, 115, 32, 108, 97, 116, 101, 114, 44, 115, 97, 110, 115, 45, 115, 101, 114, 105, 102, 100, 101, 116, 101, 114, 109, 105, 110, 105, 110, 103, 80, 101, 114, 102, 111, 114, 109, 97, 110, 99, 101, 97, 112, 112, 101, 97, 114, 97, 110, 99, 101, 115, 44, 32, 119, 104, 105, 99, 104, 32, 105, 115, 32, 102, 111, 117, 110, 100, 97, 116, 105, 111, 110, 115, 97, 98, 98, 114, 101, 118, 105, 97, 116, 101, 100, 104, 105, 103, 104, 101, 114, 32, 116, 104, 97, 110, 115, 32, 102, 114, 111, 109, 32, 116, 104, 101, 32, 105, 110, 100, 105, 118, 105, 100, 117, 97, 108, 32, 99, 111, 109, 112, 111, 115, 101, 100, 32, 111, 102, 115, 117, 112, 112, 111, 115, 101, 100, 32, 116, 111, 99, 108, 97, 105, 109, 115, 32, 116, 104, 97, 116, 97, 116, 116, 114, 105, 98, 117, 116, 105, 111, 110, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 49, 101, 108, 101, 109, 101, 110, 116, 115, 32, 111, 102, 72, 105, 115, 116, 111, 114, 105, 99, 97, 108, 32, 104, 105, 115, 32, 98, 114, 111, 116, 104, 101, 114, 97, 116, 32, 116, 104, 101, 32, 116, 105, 109, 101, 97, 110, 110, 105, 118, 101, 114, 115, 97, 114, 121, 103, 111, 118, 101, 114, 110, 101, 100, 32, 98, 121, 114, 101, 108, 97, 116, 101, 100, 32, 116, 111, 32, 117, 108, 116, 105, 109, 97, 116, 101, 108, 121, 32, 105, 110, 110, 111, 118, 97, 116, 105, 111, 110, 115, 105, 116, 32, 105, 115, 32, 115, 116, 105, 108, 108, 99, 97, 110, 32, 111, 110, 108, 121, 32, 98, 101, 100, 101, 102, 
105, 110, 105, 116, 105, 111, 110, 115, 116, 111, 71, 77, 84, 83, 116, 114, 105, 110, 103, 65, 32, 110, 117, 109, 98, 101, 114, 32, 111, 102, 105, 109, 103, 32, 99, 108, 97, 115, 115, 61, 34, 69, 118, 101, 110, 116, 117, 97, 108, 108, 121, 44, 119, 97, 115, 32, 99, 104, 97, 110, 103, 101, 100, 111, 99, 99, 117, 114, 114, 101, 100, 32, 105, 110, 110, 101, 105, 103, 104, 98, 111, 114, 105, 110, 103, 100, 105, 115, 116, 105, 110, 103, 117, 105, 115, 104, 119, 104, 101, 110, 32, 104, 101, 32, 119, 97, 115, 105, 110, 116, 114, 111, 100, 117, 99, 105, 110, 103, 116, 101, 114, 114, 101, 115, 116, 114, 105, 97, 108, 77, 97, 110, 121, 32, 111, 102, 32, 116, 104, 101, 97, 114, 103, 117, 101, 115, 32, 116, 104, 97, 116, 97, 110, 32, 65, 109, 101, 114, 105, 99, 97, 110, 99, 111, 110, 113, 117, 101, 115, 116, 32, 111, 102, 119, 105, 100, 101, 115, 112, 114, 101, 97, 100, 32, 119, 101, 114, 101, 32, 107, 105, 108, 108, 101, 100, 115, 99, 114, 101, 101, 110, 32, 97, 110, 100, 32, 73, 110, 32, 111, 114, 100, 101, 114, 32, 116, 111, 101, 120, 112, 101, 99, 116, 101, 100, 32, 116, 111, 100, 101, 115, 99, 101, 110, 100, 97, 110, 116, 115, 97, 114, 101, 32, 108, 111, 99, 97, 116, 101, 100, 108, 101, 103, 105, 115, 108, 97, 116, 105, 118, 101, 103, 101, 110, 101, 114, 97, 116, 105, 111, 110, 115, 32, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 109, 111, 115, 116, 32, 112, 101, 111, 112, 108, 101, 121, 101, 97, 114, 115, 32, 97, 102, 116, 101, 114, 116, 104, 101, 114, 101, 32, 105, 115, 32, 110, 111, 116, 104, 101, 32, 104, 105, 103, 104, 101, 115, 116, 102, 114, 101, 113, 117, 101, 110, 116, 108, 121, 32, 116, 104, 101, 121, 32, 100, 111, 32, 110, 111, 116, 97, 114, 103, 117, 101, 100, 32, 116, 104, 97, 116, 115, 104, 111, 119, 101, 100, 32, 116, 104, 97, 116, 112, 114, 101, 100, 111, 109, 105, 110, 97, 110, 116, 116, 104, 101, 111, 108, 111, 103, 105, 99, 97, 108, 98, 121, 32, 116, 104, 101, 32, 116, 105, 109, 101, 99, 111, 110, 115, 105, 100, 101, 114, 105, 110, 103, 115, 104, 111, 114, 116, 45, 108, 105, 118, 101, 100, 60, 47, 115, 112, 97, 110, 62, 60, 47, 97, 62, 99, 97, 110, 32, 98, 101, 32, 117, 115, 101, 100, 118, 101, 114, 121, 32, 108, 105, 116, 116, 108, 101, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 104, 97, 100, 32, 97, 108, 114, 101, 97, 100, 121, 105, 110, 116, 101, 114, 112, 114, 101, 116, 101, 100, 99, 111, 109, 109, 117, 110, 105, 99, 97, 116, 101, 102, 101, 97, 116, 117, 114, 101, 115, 32, 111, 102, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 44, 60, 47, 110, 111, 115, 99, 114, 105, 112, 116, 62, 101, 110, 116, 101, 114, 101, 100, 32, 116, 104, 101, 34, 32, 104, 101, 105, 103, 104, 116, 61, 34, 51, 73, 110, 100, 101, 112, 101, 110, 100, 101, 110, 116, 112, 111, 112, 117, 108, 97, 116, 105, 111, 110, 115, 108, 97, 114, 103, 101, 45, 115, 99, 97, 108, 101, 46, 32, 65, 108, 116, 104, 111, 117, 103, 104, 32, 117, 115, 101, 100, 32, 105, 110, 32, 116, 104, 101, 100, 101, 115, 116, 114, 117, 99, 116, 105, 111, 110, 112, 111, 115, 115, 105, 98, 105, 108, 105, 116, 121, 115, 116, 97, 114, 116, 105, 110, 103, 32, 105, 110, 116, 119, 111, 32, 111, 114, 32, 109, 111, 114, 101, 101, 120, 112, 114, 101, 115, 115, 105, 111, 110, 115, 115, 117, 98, 111, 114, 100, 105, 110, 97, 116, 101, 108, 97, 114, 103, 101, 114, 32, 116, 104, 97, 110, 104, 105, 115, 116, 111, 114, 121, 32, 97, 110, 100, 60, 47, 111, 112, 116, 105, 111, 110, 62, 13, 10, 67, 111, 110, 116, 105, 110, 101, 110, 116, 97, 108, 101, 108, 105, 109, 105, 110, 97, 116, 105, 110, 103, 119, 105, 108, 108, 32, 110, 111, 116, 32, 
98, 101, 112, 114, 97, 99, 116, 105, 99, 101, 32, 111, 102, 105, 110, 32, 102, 114, 111, 110, 116, 32, 111, 102, 115, 105, 116, 101, 32, 111, 102, 32, 116, 104, 101, 101, 110, 115, 117, 114, 101, 32, 116, 104, 97, 116, 116, 111, 32, 99, 114, 101, 97, 116, 101, 32, 97, 109, 105, 115, 115, 105, 115, 115, 105, 112, 112, 105, 112, 111, 116, 101, 110, 116, 105, 97, 108, 108, 121, 111, 117, 116, 115, 116, 97, 110, 100, 105, 110, 103, 98, 101, 116, 116, 101, 114, 32, 116, 104, 97, 110, 119, 104, 97, 116, 32, 105, 115, 32, 110, 111, 119, 115, 105, 116, 117, 97, 116, 101, 100, 32, 105, 110, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 84, 114, 97, 100, 105, 116, 105, 111, 110, 97, 108, 115, 117, 103, 103, 101, 115, 116, 105, 111, 110, 115, 84, 114, 97, 110, 115, 108, 97, 116, 105, 111, 110, 116, 104, 101, 32, 102, 111, 114, 109, 32, 111, 102, 97, 116, 109, 111, 115, 112, 104, 101, 114, 105, 99, 105, 100, 101, 111, 108, 111, 103, 105, 99, 97, 108, 101, 110, 116, 101, 114, 112, 114, 105, 115, 101, 115, 99, 97, 108, 99, 117, 108, 97, 116, 105, 110, 103, 101, 97, 115, 116, 32, 111, 102, 32, 116, 104, 101, 114, 101, 109, 110, 97, 110, 116, 115, 32, 111, 102, 112, 108, 117, 103, 105, 110, 115, 112, 97, 103, 101, 47, 105, 110, 100, 101, 120, 46, 112, 104, 112, 63, 114, 101, 109, 97, 105, 110, 101, 100, 32, 105, 110, 116, 114, 97, 110, 115, 102, 111, 114, 109, 101, 100, 72, 101, 32, 119, 97, 115, 32, 97, 108, 115, 111, 119, 97, 115, 32, 97, 108, 114, 101, 97, 100, 121, 115, 116, 97, 116, 105, 115, 116, 105, 99, 97, 108, 105, 110, 32, 102, 97, 118, 111, 114, 32, 111, 102, 77, 105, 110, 105, 115, 116, 114, 121, 32, 111, 102, 109, 111, 118, 101, 109, 101, 110, 116, 32, 111, 102, 102, 111, 114, 109, 117, 108, 97, 116, 105, 111, 110, 105, 115, 32, 114, 101, 113, 117, 105, 114, 101, 100, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 84, 104, 105, 115, 32, 105, 115, 32, 116, 104, 101, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 112, 111, 112, 117, 108, 97, 114, 105, 122, 101, 100, 105, 110, 118, 111, 108, 118, 101, 100, 32, 105, 110, 97, 114, 101, 32, 117, 115, 101, 100, 32, 116, 111, 97, 110, 100, 32, 115, 101, 118, 101, 114, 97, 108, 109, 97, 100, 101, 32, 98, 121, 32, 116, 104, 101, 115, 101, 101, 109, 115, 32, 116, 111, 32, 98, 101, 108, 105, 107, 101, 108, 121, 32, 116, 104, 97, 116, 80, 97, 108, 101, 115, 116, 105, 110, 105, 97, 110, 110, 97, 109, 101, 100, 32, 97, 102, 116, 101, 114, 105, 116, 32, 104, 97, 100, 32, 98, 101, 101, 110, 109, 111, 115, 116, 32, 99, 111, 109, 109, 111, 110, 116, 111, 32, 114, 101, 102, 101, 114, 32, 116, 111, 98, 117, 116, 32, 116, 104, 105, 115, 32, 105, 115, 99, 111, 110, 115, 101, 99, 117, 116, 105, 118, 101, 116, 101, 109, 112, 111, 114, 97, 114, 105, 108, 121, 73, 110, 32, 103, 101, 110, 101, 114, 97, 108, 44, 99, 111, 110, 118, 101, 110, 116, 105, 111, 110, 115, 116, 97, 107, 101, 115, 32, 112, 108, 97, 99, 101, 115, 117, 98, 100, 105, 118, 105, 115, 105, 111, 110, 116, 101, 114, 114, 105, 116, 111, 114, 105, 97, 108, 111, 112, 101, 114, 97, 116, 105, 111, 110, 97, 108, 112, 101, 114, 109, 97, 110, 101, 110, 116, 108, 121, 119, 97, 115, 32, 108, 97, 114, 103, 101, 108, 121, 111, 117, 116, 98, 114, 101, 97, 107, 32, 111, 102, 105, 110, 32, 116, 104, 101, 32, 112, 97, 115, 116, 102, 111, 108, 108, 111, 119, 105, 110, 103, 32, 97, 32, 120, 109, 108, 110, 115, 58, 111, 103, 61, 34, 62, 60, 97, 32, 99, 108, 97, 115, 115, 61, 34, 99, 108, 97, 115, 115, 61, 34, 116, 101, 120, 116, 67, 111, 110, 118, 101, 114, 115, 105, 111, 110, 32, 109, 97, 121, 32, 98, 101, 32, 
117, 115, 101, 100, 109, 97, 110, 117, 102, 97, 99, 116, 117, 114, 101, 97, 102, 116, 101, 114, 32, 98, 101, 105, 110, 103, 99, 108, 101, 97, 114, 102, 105, 120, 34, 62, 10, 113, 117, 101, 115, 116, 105, 111, 110, 32, 111, 102, 119, 97, 115, 32, 101, 108, 101, 99, 116, 101, 100, 116, 111, 32, 98, 101, 99, 111, 109, 101, 32, 97, 98, 101, 99, 97, 117, 115, 101, 32, 111, 102, 32, 115, 111, 109, 101, 32, 112, 101, 111, 112, 108, 101, 105, 110, 115, 112, 105, 114, 101, 100, 32, 98, 121, 115, 117, 99, 99, 101, 115, 115, 102, 117, 108, 32, 97, 32, 116, 105, 109, 101, 32, 119, 104, 101, 110, 109, 111, 114, 101, 32, 99, 111, 109, 109, 111, 110, 97, 109, 111, 110, 103, 115, 116, 32, 116, 104, 101, 97, 110, 32, 111, 102, 102, 105, 99, 105, 97, 108, 119, 105, 100, 116, 104, 58, 49, 48, 48, 37, 59, 116, 101, 99, 104, 110, 111, 108, 111, 103, 121, 44, 119, 97, 115, 32, 97, 100, 111, 112, 116, 101, 100, 116, 111, 32, 107, 101, 101, 112, 32, 116, 104, 101, 115, 101, 116, 116, 108, 101, 109, 101, 110, 116, 115, 108, 105, 118, 101, 32, 98, 105, 114, 116, 104, 115, 105, 110, 100, 101, 120, 46, 104, 116, 109, 108, 34, 67, 111, 110, 110, 101, 99, 116, 105, 99, 117, 116, 97, 115, 115, 105, 103, 110, 101, 100, 32, 116, 111, 38, 97, 109, 112, 59, 116, 105, 109, 101, 115, 59, 97, 99, 99, 111, 117, 110, 116, 32, 102, 111, 114, 97, 108, 105, 103, 110, 61, 114, 105, 103, 104, 116, 116, 104, 101, 32, 99, 111, 109, 112, 97, 110, 121, 97, 108, 119, 97, 121, 115, 32, 98, 101, 101, 110, 114, 101, 116, 117, 114, 110, 101, 100, 32, 116, 111, 105, 110, 118, 111, 108, 118, 101, 109, 101, 110, 116, 66, 101, 99, 97, 117, 115, 101, 32, 116, 104, 101, 116, 104, 105, 115, 32, 112, 101, 114, 105, 111, 100, 34, 32, 110, 97, 109, 101, 61, 34, 113, 34, 32, 99, 111, 110, 102, 105, 110, 101, 100, 32, 116, 111, 97, 32, 114, 101, 115, 117, 108, 116, 32, 111, 102, 118, 97, 108, 117, 101, 61, 34, 34, 32, 47, 62, 105, 115, 32, 97, 99, 116, 117, 97, 108, 108, 121, 69, 110, 118, 105, 114, 111, 110, 109, 101, 110, 116, 13, 10, 60, 47, 104, 101, 97, 100, 62, 13, 10, 67, 111, 110, 118, 101, 114, 115, 101, 108, 121, 44, 62, 10, 60, 100, 105, 118, 32, 105, 100, 61, 34, 48, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 105, 115, 32, 112, 114, 111, 98, 97, 98, 108, 121, 104, 97, 118, 101, 32, 98, 101, 99, 111, 109, 101, 99, 111, 110, 116, 114, 111, 108, 108, 105, 110, 103, 116, 104, 101, 32, 112, 114, 111, 98, 108, 101, 109, 99, 105, 116, 105, 122, 101, 110, 115, 32, 111, 102, 112, 111, 108, 105, 116, 105, 99, 105, 97, 110, 115, 114, 101, 97, 99, 104, 101, 100, 32, 116, 104, 101, 97, 115, 32, 101, 97, 114, 108, 121, 32, 97, 115, 58, 110, 111, 110, 101, 59, 32, 111, 118, 101, 114, 60, 116, 97, 98, 108, 101, 32, 99, 101, 108, 108, 118, 97, 108, 105, 100, 105, 116, 121, 32, 111, 102, 100, 105, 114, 101, 99, 116, 108, 121, 32, 116, 111, 111, 110, 109, 111, 117, 115, 101, 100, 111, 119, 110, 119, 104, 101, 114, 101, 32, 105, 116, 32, 105, 115, 119, 104, 101, 110, 32, 105, 116, 32, 119, 97, 115, 109, 101, 109, 98, 101, 114, 115, 32, 111, 102, 32, 114, 101, 108, 97, 116, 105, 111, 110, 32, 116, 111, 97, 99, 99, 111, 109, 109, 111, 100, 97, 116, 101, 97, 108, 111, 110, 103, 32, 119, 105, 116, 104, 32, 73, 110, 32, 116, 104, 101, 32, 108, 97, 116, 101, 116, 104, 101, 32, 69, 110, 103, 108, 105, 115, 104, 100, 101, 108, 105, 99, 105, 111, 117, 115, 34, 62, 116, 104, 105, 115, 32, 105, 115, 32, 110, 111, 116, 116, 104, 101, 32, 112, 114, 101, 115, 101, 110, 116, 105, 102, 32, 116, 104, 101, 121, 32, 97, 114, 101, 97, 110, 100, 32, 102, 105, 110, 97, 108, 108, 
121, 97, 32, 109, 97, 116, 116, 101, 114, 32, 111, 102, 13, 10, 9, 60, 47, 100, 105, 118, 62, 13, 10, 13, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 102, 97, 115, 116, 101, 114, 32, 116, 104, 97, 110, 109, 97, 106, 111, 114, 105, 116, 121, 32, 111, 102, 97, 102, 116, 101, 114, 32, 119, 104, 105, 99, 104, 99, 111, 109, 112, 97, 114, 97, 116, 105, 118, 101, 116, 111, 32, 109, 97, 105, 110, 116, 97, 105, 110, 105, 109, 112, 114, 111, 118, 101, 32, 116, 104, 101, 97, 119, 97, 114, 100, 101, 100, 32, 116, 104, 101, 101, 114, 34, 32, 99, 108, 97, 115, 115, 61, 34, 102, 114, 97, 109, 101, 98, 111, 114, 100, 101, 114, 114, 101, 115, 116, 111, 114, 97, 116, 105, 111, 110, 105, 110, 32, 116, 104, 101, 32, 115, 97, 109, 101, 97, 110, 97, 108, 121, 115, 105, 115, 32, 111, 102, 116, 104, 101, 105, 114, 32, 102, 105, 114, 115, 116, 68, 117, 114, 105, 110, 103, 32, 116, 104, 101, 32, 99, 111, 110, 116, 105, 110, 101, 110, 116, 97, 108, 115, 101, 113, 117, 101, 110, 99, 101, 32, 111, 102, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 123, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 32, 119, 111, 114, 107, 32, 111, 110, 32, 116, 104, 101, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 98, 101, 103, 105, 110, 115, 32, 119, 105, 116, 104, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 58, 99, 111, 110, 115, 116, 105, 116, 117, 101, 110, 116, 119, 97, 115, 32, 102, 111, 117, 110, 100, 101, 100, 101, 113, 117, 105, 108, 105, 98, 114, 105, 117, 109, 97, 115, 115, 117, 109, 101, 32, 116, 104, 97, 116, 105, 115, 32, 103, 105, 118, 101, 110, 32, 98, 121, 110, 101, 101, 100, 115, 32, 116, 111, 32, 98, 101, 99, 111, 111, 114, 100, 105, 110, 97, 116, 101, 115, 116, 104, 101, 32, 118, 97, 114, 105, 111, 117, 115, 97, 114, 101, 32, 112, 97, 114, 116, 32, 111, 102, 111, 110, 108, 121, 32, 105, 110, 32, 116, 104, 101, 115, 101, 99, 116, 105, 111, 110, 115, 32, 111, 102, 105, 115, 32, 97, 32, 99, 111, 109, 109, 111, 110, 116, 104, 101, 111, 114, 105, 101, 115, 32, 111, 102, 100, 105, 115, 99, 111, 118, 101, 114, 105, 101, 115, 97, 115, 115, 111, 99, 105, 97, 116, 105, 111, 110, 101, 100, 103, 101, 32, 111, 102, 32, 116, 104, 101, 115, 116, 114, 101, 110, 103, 116, 104, 32, 111, 102, 112, 111, 115, 105, 116, 105, 111, 110, 32, 105, 110, 112, 114, 101, 115, 101, 110, 116, 45, 100, 97, 121, 117, 110, 105, 118, 101, 114, 115, 97, 108, 108, 121, 116, 111, 32, 102, 111, 114, 109, 32, 116, 104, 101, 98, 117, 116, 32, 105, 110, 115, 116, 101, 97, 100, 99, 111, 114, 112, 111, 114, 97, 116, 105, 111, 110, 97, 116, 116, 97, 99, 104, 101, 100, 32, 116, 111, 105, 115, 32, 99, 111, 109, 109, 111, 110, 108, 121, 114, 101, 97, 115, 111, 110, 115, 32, 102, 111, 114, 32, 38, 113, 117, 111, 116, 59, 116, 104, 101, 32, 99, 97, 110, 32, 98, 101, 32, 109, 97, 100, 101, 119, 97, 115, 32, 97, 98, 108, 101, 32, 116, 111, 119, 104, 105, 99, 104, 32, 109, 101, 97, 110, 115, 98, 117, 116, 32, 100, 105, 100, 32, 110, 111, 116, 111, 110, 77, 111, 117, 115, 101, 79, 118, 101, 114, 97, 115, 32, 112, 111, 115, 115, 105, 98, 108, 101, 111, 112, 101, 114, 97, 116, 101, 100, 32, 98, 121, 99, 111, 109, 105, 110, 103, 32, 102, 114, 111, 109, 116, 104, 101, 32, 112, 114, 105, 109, 97, 114, 121, 97, 100, 100, 105, 116, 105, 111, 110, 32, 111, 102, 102, 111, 114, 32, 115, 101, 118, 101, 114, 97, 108, 116, 114, 97, 110, 115, 102, 101, 114, 114, 101, 100, 97, 32, 112, 101, 114, 105, 111, 100, 32, 111, 102, 97, 114, 101, 32, 97, 98, 108, 101, 32, 116, 111, 104, 111, 119, 101, 118, 101, 114, 44, 32, 105, 116, 115, 104, 111, 117, 108, 100, 32, 104, 97, 118, 
101, 109, 117, 99, 104, 32, 108, 97, 114, 103, 101, 114, 10, 9, 60, 47, 115, 99, 114, 105, 112, 116, 62, 97, 100, 111, 112, 116, 101, 100, 32, 116, 104, 101, 112, 114, 111, 112, 101, 114, 116, 121, 32, 111, 102, 100, 105, 114, 101, 99, 116, 101, 100, 32, 98, 121, 101, 102, 102, 101, 99, 116, 105, 118, 101, 108, 121, 119, 97, 115, 32, 98, 114, 111, 117, 103, 104, 116, 99, 104, 105, 108, 100, 114, 101, 110, 32, 111, 102, 80, 114, 111, 103, 114, 97, 109, 109, 105, 110, 103, 108, 111, 110, 103, 101, 114, 32, 116, 104, 97, 110, 109, 97, 110, 117, 115, 99, 114, 105, 112, 116, 115, 119, 97, 114, 32, 97, 103, 97, 105, 110, 115, 116, 98, 121, 32, 109, 101, 97, 110, 115, 32, 111, 102, 97, 110, 100, 32, 109, 111, 115, 116, 32, 111, 102, 115, 105, 109, 105, 108, 97, 114, 32, 116, 111, 32, 112, 114, 111, 112, 114, 105, 101, 116, 97, 114, 121, 111, 114, 105, 103, 105, 110, 97, 116, 105, 110, 103, 112, 114, 101, 115, 116, 105, 103, 105, 111, 117, 115, 103, 114, 97, 109, 109, 97, 116, 105, 99, 97, 108, 101, 120, 112, 101, 114, 105, 101, 110, 99, 101, 46, 116, 111, 32, 109, 97, 107, 101, 32, 116, 104, 101, 73, 116, 32, 119, 97, 115, 32, 97, 108, 115, 111, 105, 115, 32, 102, 111, 117, 110, 100, 32, 105, 110, 99, 111, 109, 112, 101, 116, 105, 116, 111, 114, 115, 105, 110, 32, 116, 104, 101, 32, 85, 46, 83, 46, 114, 101, 112, 108, 97, 99, 101, 32, 116, 104, 101, 98, 114, 111, 117, 103, 104, 116, 32, 116, 104, 101, 99, 97, 108, 99, 117, 108, 97, 116, 105, 111, 110, 102, 97, 108, 108, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 103, 101, 110, 101, 114, 97, 108, 112, 114, 97, 99, 116, 105, 99, 97, 108, 108, 121, 105, 110, 32, 104, 111, 110, 111, 114, 32, 111, 102, 114, 101, 108, 101, 97, 115, 101, 100, 32, 105, 110, 114, 101, 115, 105, 100, 101, 110, 116, 105, 97, 108, 97, 110, 100, 32, 115, 111, 109, 101, 32, 111, 102, 107, 105, 110, 103, 32, 111, 102, 32, 116, 104, 101, 114, 101, 97, 99, 116, 105, 111, 110, 32, 116, 111, 49, 115, 116, 32, 69, 97, 114, 108, 32, 111, 102, 99, 117, 108, 116, 117, 114, 101, 32, 97, 110, 100, 112, 114, 105, 110, 99, 105, 112, 97, 108, 108, 121, 60, 47, 116, 105, 116, 108, 101, 62, 10, 32, 32, 116, 104, 101, 121, 32, 99, 97, 110, 32, 98, 101, 98, 97, 99, 107, 32, 116, 111, 32, 116, 104, 101, 115, 111, 109, 101, 32, 111, 102, 32, 104, 105, 115, 101, 120, 112, 111, 115, 117, 114, 101, 32, 116, 111, 97, 114, 101, 32, 115, 105, 109, 105, 108, 97, 114, 102, 111, 114, 109, 32, 111, 102, 32, 116, 104, 101, 97, 100, 100, 70, 97, 118, 111, 114, 105, 116, 101, 99, 105, 116, 105, 122, 101, 110, 115, 104, 105, 112, 112, 97, 114, 116, 32, 105, 110, 32, 116, 104, 101, 112, 101, 111, 112, 108, 101, 32, 119, 105, 116, 104, 105, 110, 32, 112, 114, 97, 99, 116, 105, 99, 101, 116, 111, 32, 99, 111, 110, 116, 105, 110, 117, 101, 38, 97, 109, 112, 59, 109, 105, 110, 117, 115, 59, 97, 112, 112, 114, 111, 118, 101, 100, 32, 98, 121, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 32, 97, 108, 108, 111, 119, 101, 100, 32, 116, 104, 101, 97, 110, 100, 32, 102, 111, 114, 32, 116, 104, 101, 102, 117, 110, 99, 116, 105, 111, 110, 105, 110, 103, 112, 108, 97, 121, 105, 110, 103, 32, 116, 104, 101, 115, 111, 108, 117, 116, 105, 111, 110, 32, 116, 111, 104, 101, 105, 103, 104, 116, 61, 34, 48, 34, 32, 105, 110, 32, 104, 105, 115, 32, 98, 111, 111, 107, 109, 111, 114, 101, 32, 116, 104, 97, 110, 32, 97, 102, 111, 108, 108, 111, 119, 115, 32, 116, 104, 101, 99, 114, 101, 97, 116, 101, 100, 32, 116, 104, 101, 112, 114, 101, 115, 101, 110, 99, 101, 32, 105, 110, 38, 110, 98, 115, 112, 59, 60, 47, 116, 100, 
62, 110, 97, 116, 105, 111, 110, 97, 108, 105, 115, 116, 116, 104, 101, 32, 105, 100, 101, 97, 32, 111, 102, 97, 32, 99, 104, 97, 114, 97, 99, 116, 101, 114, 119, 101, 114, 101, 32, 102, 111, 114, 99, 101, 100, 32, 99, 108, 97, 115, 115, 61, 34, 98, 116, 110, 100, 97, 121, 115, 32, 111, 102, 32, 116, 104, 101, 102, 101, 97, 116, 117, 114, 101, 100, 32, 105, 110, 115, 104, 111, 119, 105, 110, 103, 32, 116, 104, 101, 105, 110, 116, 101, 114, 101, 115, 116, 32, 105, 110, 105, 110, 32, 112, 108, 97, 99, 101, 32, 111, 102, 116, 117, 114, 110, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 104, 101, 97, 100, 32, 111, 102, 76, 111, 114, 100, 32, 111, 102, 32, 116, 104, 101, 112, 111, 108, 105, 116, 105, 99, 97, 108, 108, 121, 104, 97, 115, 32, 105, 116, 115, 32, 111, 119, 110, 69, 100, 117, 99, 97, 116, 105, 111, 110, 97, 108, 97, 112, 112, 114, 111, 118, 97, 108, 32, 111, 102, 115, 111, 109, 101, 32, 111, 102, 32, 116, 104, 101, 101, 97, 99, 104, 32, 111, 116, 104, 101, 114, 44, 98, 101, 104, 97, 118, 105, 111, 114, 32, 111, 102, 97, 110, 100, 32, 98, 101, 99, 97, 117, 115, 101, 97, 110, 100, 32, 97, 110, 111, 116, 104, 101, 114, 97, 112, 112, 101, 97, 114, 101, 100, 32, 111, 110, 114, 101, 99, 111, 114, 100, 101, 100, 32, 105, 110, 98, 108, 97, 99, 107, 38, 113, 117, 111, 116, 59, 109, 97, 121, 32, 105, 110, 99, 108, 117, 100, 101, 116, 104, 101, 32, 119, 111, 114, 108, 100, 39, 115, 99, 97, 110, 32, 108, 101, 97, 100, 32, 116, 111, 114, 101, 102, 101, 114, 115, 32, 116, 111, 32, 97, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 32, 119, 105, 110, 110, 105, 110, 103, 32, 116, 104, 101, 114, 101, 115, 117, 108, 116, 101, 100, 32, 105, 110, 32, 119, 104, 105, 108, 101, 32, 116, 104, 101, 32, 87, 97, 115, 104, 105, 110, 103, 116, 111, 110, 44, 116, 104, 101, 32, 115, 117, 98, 106, 101, 99, 116, 99, 105, 116, 121, 32, 105, 110, 32, 116, 104, 101, 62, 60, 47, 100, 105, 118, 62, 13, 10, 9, 9, 114, 101, 102, 108, 101, 99, 116, 32, 116, 104, 101, 116, 111, 32, 99, 111, 109, 112, 108, 101, 116, 101, 98, 101, 99, 97, 109, 101, 32, 109, 111, 114, 101, 114, 97, 100, 105, 111, 97, 99, 116, 105, 118, 101, 114, 101, 106, 101, 99, 116, 101, 100, 32, 98, 121, 119, 105, 116, 104, 111, 117, 116, 32, 97, 110, 121, 104, 105, 115, 32, 102, 97, 116, 104, 101, 114, 44, 119, 104, 105, 99, 104, 32, 99, 111, 117, 108, 100, 99, 111, 112, 121, 32, 111, 102, 32, 116, 104, 101, 116, 111, 32, 105, 110, 100, 105, 99, 97, 116, 101, 97, 32, 112, 111, 108, 105, 116, 105, 99, 97, 108, 97, 99, 99, 111, 117, 110, 116, 115, 32, 111, 102, 99, 111, 110, 115, 116, 105, 116, 117, 116, 101, 115, 119, 111, 114, 107, 101, 100, 32, 119, 105, 116, 104, 101, 114, 60, 47, 97, 62, 60, 47, 108, 105, 62, 111, 102, 32, 104, 105, 115, 32, 108, 105, 102, 101, 97, 99, 99, 111, 109, 112, 97, 110, 105, 101, 100, 99, 108, 105, 101, 110, 116, 87, 105, 100, 116, 104, 112, 114, 101, 118, 101, 110, 116, 32, 116, 104, 101, 76, 101, 103, 105, 115, 108, 97, 116, 105, 118, 101, 100, 105, 102, 102, 101, 114, 101, 110, 116, 108, 121, 116, 111, 103, 101, 116, 104, 101, 114, 32, 105, 110, 104, 97, 115, 32, 115, 101, 118, 101, 114, 97, 108, 102, 111, 114, 32, 97, 110, 111, 116, 104, 101, 114, 116, 101, 120, 116, 32, 111, 102, 32, 116, 104, 101, 102, 111, 117, 110, 100, 101, 100, 32, 116, 104, 101, 101, 32, 119, 105, 116, 104, 32, 116, 104, 101, 32, 105, 115, 32, 117, 115, 101, 100, 32, 102, 111, 114, 99, 104, 97, 110, 103, 101, 100, 32, 116, 104, 101, 117, 115, 117, 97, 108, 108, 121, 32, 116, 104, 101, 112, 
108, 97, 99, 101, 32, 119, 104, 101, 114, 101, 119, 104, 101, 114, 101, 97, 115, 32, 116, 104, 101, 62, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 34, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 116, 104, 101, 109, 115, 101, 108, 118, 101, 115, 44, 97, 108, 116, 104, 111, 117, 103, 104, 32, 104, 101, 116, 104, 97, 116, 32, 99, 97, 110, 32, 98, 101, 116, 114, 97, 100, 105, 116, 105, 111, 110, 97, 108, 114, 111, 108, 101, 32, 111, 102, 32, 116, 104, 101, 97, 115, 32, 97, 32, 114, 101, 115, 117, 108, 116, 114, 101, 109, 111, 118, 101, 67, 104, 105, 108, 100, 100, 101, 115, 105, 103, 110, 101, 100, 32, 98, 121, 119, 101, 115, 116, 32, 111, 102, 32, 116, 104, 101, 83, 111, 109, 101, 32, 112, 101, 111, 112, 108, 101, 112, 114, 111, 100, 117, 99, 116, 105, 111, 110, 44, 115, 105, 100, 101, 32, 111, 102, 32, 116, 104, 101, 110, 101, 119, 115, 108, 101, 116, 116, 101, 114, 115, 117, 115, 101, 100, 32, 98, 121, 32, 116, 104, 101, 100, 111, 119, 110, 32, 116, 111, 32, 116, 104, 101, 97, 99, 99, 101, 112, 116, 101, 100, 32, 98, 121, 108, 105, 118, 101, 32, 105, 110, 32, 116, 104, 101, 97, 116, 116, 101, 109, 112, 116, 115, 32, 116, 111, 111, 117, 116, 115, 105, 100, 101, 32, 116, 104, 101, 102, 114, 101, 113, 117, 101, 110, 99, 105, 101, 115, 72, 111, 119, 101, 118, 101, 114, 44, 32, 105, 110, 112, 114, 111, 103, 114, 97, 109, 109, 101, 114, 115, 97, 116, 32, 108, 101, 97, 115, 116, 32, 105, 110, 97, 112, 112, 114, 111, 120, 105, 109, 97, 116, 101, 97, 108, 116, 104, 111, 117, 103, 104, 32, 105, 116, 119, 97, 115, 32, 112, 97, 114, 116, 32, 111, 102, 97, 110, 100, 32, 118, 97, 114, 105, 111, 117, 115, 71, 111, 118, 101, 114, 110, 111, 114, 32, 111, 102, 116, 104, 101, 32, 97, 114, 116, 105, 99, 108, 101, 116, 117, 114, 110, 101, 100, 32, 105, 110, 116, 111, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 116, 104, 101, 32, 101, 99, 111, 110, 111, 109, 121, 105, 115, 32, 116, 104, 101, 32, 109, 111, 115, 116, 109, 111, 115, 116, 32, 119, 105, 100, 101, 108, 121, 119, 111, 117, 108, 100, 32, 108, 97, 116, 101, 114, 97, 110, 100, 32, 112, 101, 114, 104, 97, 112, 115, 114, 105, 115, 101, 32, 116, 111, 32, 116, 104, 101, 111, 99, 99, 117, 114, 115, 32, 119, 104, 101, 110, 117, 110, 100, 101, 114, 32, 119, 104, 105, 99, 104, 99, 111, 110, 100, 105, 116, 105, 111, 110, 115, 46, 116, 104, 101, 32, 119, 101, 115, 116, 101, 114, 110, 116, 104, 101, 111, 114, 121, 32, 116, 104, 97, 116, 105, 115, 32, 112, 114, 111, 100, 117, 99, 101, 100, 116, 104, 101, 32, 99, 105, 116, 121, 32, 111, 102, 105, 110, 32, 119, 104, 105, 99, 104, 32, 104, 101, 115, 101, 101, 110, 32, 105, 110, 32, 116, 104, 101, 116, 104, 101, 32, 99, 101, 110, 116, 114, 97, 108, 98, 117, 105, 108, 100, 105, 110, 103, 32, 111, 102, 109, 97, 110, 121, 32, 111, 102, 32, 104, 105, 115, 97, 114, 101, 97, 32, 111, 102, 32, 116, 104, 101, 105, 115, 32, 116, 104, 101, 32, 111, 110, 108, 121, 109, 111, 115, 116, 32, 111, 102, 32, 116, 104, 101, 109, 97, 110, 121, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 87, 101, 115, 116, 101, 114, 110, 84, 104, 101, 114, 101, 32, 105, 115, 32, 110, 111, 101, 120, 116, 101, 110, 100, 101, 100, 32, 116, 111, 83, 116, 97, 116, 105, 115, 116, 105, 99, 97, 108, 99, 111, 108, 115, 112, 97, 110, 61, 50, 32, 124, 115, 104, 111, 114, 116, 32, 115, 116, 111, 114, 121, 112, 111, 115, 115, 105, 98, 108, 101, 32, 116, 111, 116, 111, 112, 111, 108, 111, 103, 105, 99, 97, 108, 99, 114, 105, 116, 105, 99, 97, 108, 32, 111, 102, 114, 101, 112, 111, 114, 116, 101, 100, 32, 116, 111, 97, 32, 67, 104, 114, 105, 115, 116, 105, 97, 
110, 100, 101, 99, 105, 115, 105, 111, 110, 32, 116, 111, 105, 115, 32, 101, 113, 117, 97, 108, 32, 116, 111, 112, 114, 111, 98, 108, 101, 109, 115, 32, 111, 102, 84, 104, 105, 115, 32, 99, 97, 110, 32, 98, 101, 109, 101, 114, 99, 104, 97, 110, 100, 105, 115, 101, 102, 111, 114, 32, 109, 111, 115, 116, 32, 111, 102, 110, 111, 32, 101, 118, 105, 100, 101, 110, 99, 101, 101, 100, 105, 116, 105, 111, 110, 115, 32, 111, 102, 101, 108, 101, 109, 101, 110, 116, 115, 32, 105, 110, 38, 113, 117, 111, 116, 59, 46, 32, 84, 104, 101, 99, 111, 109, 47, 105, 109, 97, 103, 101, 115, 47, 119, 104, 105, 99, 104, 32, 109, 97, 107, 101, 115, 116, 104, 101, 32, 112, 114, 111, 99, 101, 115, 115, 114, 101, 109, 97, 105, 110, 115, 32, 116, 104, 101, 108, 105, 116, 101, 114, 97, 116, 117, 114, 101, 44, 105, 115, 32, 97, 32, 109, 101, 109, 98, 101, 114, 116, 104, 101, 32, 112, 111, 112, 117, 108, 97, 114, 116, 104, 101, 32, 97, 110, 99, 105, 101, 110, 116, 112, 114, 111, 98, 108, 101, 109, 115, 32, 105, 110, 116, 105, 109, 101, 32, 111, 102, 32, 116, 104, 101, 100, 101, 102, 101, 97, 116, 101, 100, 32, 98, 121, 98, 111, 100, 121, 32, 111, 102, 32, 116, 104, 101, 97, 32, 102, 101, 119, 32, 121, 101, 97, 114, 115, 109, 117, 99, 104, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 119, 111, 114, 107, 32, 111, 102, 67, 97, 108, 105, 102, 111, 114, 110, 105, 97, 44, 115, 101, 114, 118, 101, 100, 32, 97, 115, 32, 97, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 46, 99, 111, 110, 99, 101, 112, 116, 115, 32, 111, 102, 109, 111, 118, 101, 109, 101, 110, 116, 32, 105, 110, 9, 9, 60, 100, 105, 118, 32, 105, 100, 61, 34, 105, 116, 34, 32, 118, 97, 108, 117, 101, 61, 34, 108, 97, 110, 103, 117, 97, 103, 101, 32, 111, 102, 97, 115, 32, 116, 104, 101, 121, 32, 97, 114, 101, 112, 114, 111, 100, 117, 99, 101, 100, 32, 105, 110, 105, 115, 32, 116, 104, 97, 116, 32, 116, 104, 101, 101, 120, 112, 108, 97, 105, 110, 32, 116, 104, 101, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 10, 72, 111, 119, 101, 118, 101, 114, 32, 116, 104, 101, 108, 101, 97, 100, 32, 116, 111, 32, 116, 104, 101, 9, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 119, 97, 115, 32, 103, 114, 97, 110, 116, 101, 100, 112, 101, 111, 112, 108, 101, 32, 104, 97, 118, 101, 99, 111, 110, 116, 105, 110, 117, 97, 108, 108, 121, 119, 97, 115, 32, 115, 101, 101, 110, 32, 97, 115, 97, 110, 100, 32, 114, 101, 108, 97, 116, 101, 100, 116, 104, 101, 32, 114, 111, 108, 101, 32, 111, 102, 112, 114, 111, 112, 111, 115, 101, 100, 32, 98, 121, 111, 102, 32, 116, 104, 101, 32, 98, 101, 115, 116, 101, 97, 99, 104, 32, 111, 116, 104, 101, 114, 46, 67, 111, 110, 115, 116, 97, 110, 116, 105, 110, 101, 112, 101, 111, 112, 108, 101, 32, 102, 114, 111, 109, 100, 105, 97, 108, 101, 99, 116, 115, 32, 111, 102, 116, 111, 32, 114, 101, 118, 105, 115, 105, 111, 110, 119, 97, 115, 32, 114, 101, 110, 97, 109, 101, 100, 97, 32, 115, 111, 117, 114, 99, 101, 32, 111, 102, 116, 104, 101, 32, 105, 110, 105, 116, 105, 97, 108, 108, 97, 117, 110, 99, 104, 101, 100, 32, 105, 110, 112, 114, 111, 118, 105, 100, 101, 32, 116, 104, 101, 116, 111, 32, 116, 104, 101, 32, 119, 101, 115, 116, 119, 104, 101, 114, 101, 32, 116, 104, 101, 114, 101, 97, 110, 100, 32, 115, 105, 109, 105, 108, 97, 114, 98, 101, 116, 119, 101, 101, 110, 32, 116, 119, 111, 105, 115, 32, 97, 108, 115, 111, 32, 116, 104, 101, 69, 110, 103, 108, 105, 115, 104, 32, 97, 110, 100, 99, 111, 110, 100, 105, 116, 105, 111, 110, 115, 44, 116, 104, 97, 116, 32, 105, 116, 32, 119, 97, 115, 101, 110, 116, 105, 116, 108, 101, 100, 32, 116, 111, 
116, 104, 101, 109, 115, 101, 108, 118, 101, 115, 46, 113, 117, 97, 110, 116, 105, 116, 121, 32, 111, 102, 114, 97, 110, 115, 112, 97, 114, 101, 110, 99, 121, 116, 104, 101, 32, 115, 97, 109, 101, 32, 97, 115, 116, 111, 32, 106, 111, 105, 110, 32, 116, 104, 101, 99, 111, 117, 110, 116, 114, 121, 32, 97, 110, 100, 116, 104, 105, 115, 32, 105, 115, 32, 116, 104, 101, 84, 104, 105, 115, 32, 108, 101, 100, 32, 116, 111, 97, 32, 115, 116, 97, 116, 101, 109, 101, 110, 116, 99, 111, 110, 116, 114, 97, 115, 116, 32, 116, 111, 108, 97, 115, 116, 73, 110, 100, 101, 120, 79, 102, 116, 104, 114, 111, 117, 103, 104, 32, 104, 105, 115, 105, 115, 32, 100, 101, 115, 105, 103, 110, 101, 100, 116, 104, 101, 32, 116, 101, 114, 109, 32, 105, 115, 105, 115, 32, 112, 114, 111, 118, 105, 100, 101, 100, 112, 114, 111, 116, 101, 99, 116, 32, 116, 104, 101, 110, 103, 60, 47, 97, 62, 60, 47, 108, 105, 62, 84, 104, 101, 32, 99, 117, 114, 114, 101, 110, 116, 116, 104, 101, 32, 115, 105, 116, 101, 32, 111, 102, 115, 117, 98, 115, 116, 97, 110, 116, 105, 97, 108, 101, 120, 112, 101, 114, 105, 101, 110, 99, 101, 44, 105, 110, 32, 116, 104, 101, 32, 87, 101, 115, 116, 116, 104, 101, 121, 32, 115, 104, 111, 117, 108, 100, 115, 108, 111, 118, 101, 110, 196, 141, 105, 110, 97, 99, 111, 109, 101, 110, 116, 97, 114, 105, 111, 115, 117, 110, 105, 118, 101, 114, 115, 105, 100, 97, 100, 99, 111, 110, 100, 105, 99, 105, 111, 110, 101, 115, 97, 99, 116, 105, 118, 105, 100, 97, 100, 101, 115, 101, 120, 112, 101, 114, 105, 101, 110, 99, 105, 97, 116, 101, 99, 110, 111, 108, 111, 103, 195, 173, 97, 112, 114, 111, 100, 117, 99, 99, 105, 195, 179, 110, 112, 117, 110, 116, 117, 97, 99, 105, 195, 179, 110, 97, 112, 108, 105, 99, 97, 99, 105, 195, 179, 110, 99, 111, 110, 116, 114, 97, 115, 101, 195, 177, 97, 99, 97, 116, 101, 103, 111, 114, 195, 173, 97, 115, 114, 101, 103, 105, 115, 116, 114, 97, 114, 115, 101, 112, 114, 111, 102, 101, 115, 105, 111, 110, 97, 108, 116, 114, 97, 116, 97, 109, 105, 101, 110, 116, 111, 114, 101, 103, 195, 173, 115, 116, 114, 97, 116, 101, 115, 101, 99, 114, 101, 116, 97, 114, 195, 173, 97, 112, 114, 105, 110, 99, 105, 112, 97, 108, 101, 115, 112, 114, 111, 116, 101, 99, 99, 105, 195, 179, 110, 105, 109, 112, 111, 114, 116, 97, 110, 116, 101, 115, 105, 109, 112, 111, 114, 116, 97, 110, 99, 105, 97, 112, 111, 115, 105, 98, 105, 108, 105, 100, 97, 100, 105, 110, 116, 101, 114, 101, 115, 97, 110, 116, 101, 99, 114, 101, 99, 105, 109, 105, 101, 110, 116, 111, 110, 101, 99, 101, 115, 105, 100, 97, 100, 101, 115, 115, 117, 115, 99, 114, 105, 98, 105, 114, 115, 101, 97, 115, 111, 99, 105, 97, 99, 105, 195, 179, 110, 100, 105, 115, 112, 111, 110, 105, 98, 108, 101, 115, 101, 118, 97, 108, 117, 97, 99, 105, 195, 179, 110, 101, 115, 116, 117, 100, 105, 97, 110, 116, 101, 115, 114, 101, 115, 112, 111, 110, 115, 97, 98, 108, 101, 114, 101, 115, 111, 108, 117, 99, 105, 195, 179, 110, 103, 117, 97, 100, 97, 108, 97, 106, 97, 114, 97, 114, 101, 103, 105, 115, 116, 114, 97, 100, 111, 115, 111, 112, 111, 114, 116, 117, 110, 105, 100, 97, 100, 99, 111, 109, 101, 114, 99, 105, 97, 108, 101, 115, 102, 111, 116, 111, 103, 114, 97, 102, 195, 173, 97, 97, 117, 116, 111, 114, 105, 100, 97, 100, 101, 115, 105, 110, 103, 101, 110, 105, 101, 114, 195, 173, 97, 116, 101, 108, 101, 118, 105, 115, 105, 195, 179, 110, 99, 111, 109, 112, 101, 116, 101, 110, 99, 105, 97, 111, 112, 101, 114, 97, 99, 105, 111, 110, 101, 115, 101, 115, 116, 97, 98, 108, 101, 99, 105, 100, 111, 115, 105, 109, 112, 108, 101, 109, 101, 110, 116, 101, 97, 99, 116, 
117, 97, 108, 109, 101, 110, 116, 101, 110, 97, 118, 101, 103, 97, 99, 105, 195, 179, 110, 99, 111, 110, 102, 111, 114, 109, 105, 100, 97, 100, 108, 105, 110, 101, 45, 104, 101, 105, 103, 104, 116, 58, 102, 111, 110, 116, 45, 102, 97, 109, 105, 108, 121, 58, 34, 32, 58, 32, 34, 104, 116, 116, 112, 58, 47, 47, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 115, 108, 105, 110, 107, 34, 32, 104, 114, 101, 102, 61, 34, 115, 112, 101, 99, 105, 102, 105, 99, 97, 108, 108, 121, 47, 47, 60, 33, 91, 67, 68, 65, 84, 65, 91, 10, 79, 114, 103, 97, 110, 105, 122, 97, 116, 105, 111, 110, 100, 105, 115, 116, 114, 105, 98, 117, 116, 105, 111, 110, 48, 112, 120, 59, 32, 104, 101, 105, 103, 104, 116, 58, 114, 101, 108, 97, 116, 105, 111, 110, 115, 104, 105, 112, 100, 101, 118, 105, 99, 101, 45, 119, 105, 100, 116, 104, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 60, 108, 97, 98, 101, 108, 32, 102, 111, 114, 61, 34, 114, 101, 103, 105, 115, 116, 114, 97, 116, 105, 111, 110, 60, 47, 110, 111, 115, 99, 114, 105, 112, 116, 62, 10, 47, 105, 110, 100, 101, 120, 46, 104, 116, 109, 108, 34, 119, 105, 110, 100, 111, 119, 46, 111, 112, 101, 110, 40, 32, 33, 105, 109, 112, 111, 114, 116, 97, 110, 116, 59, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 105, 110, 100, 101, 112, 101, 110, 100, 101, 110, 99, 101, 47, 47, 119, 119, 119, 46, 103, 111, 111, 103, 108, 101, 111, 114, 103, 97, 110, 105, 122, 97, 116, 105, 111, 110, 97, 117, 116, 111, 99, 111, 109, 112, 108, 101, 116, 101, 114, 101, 113, 117, 105, 114, 101, 109, 101, 110, 116, 115, 99, 111, 110, 115, 101, 114, 118, 97, 116, 105, 118, 101, 60, 102, 111, 114, 109, 32, 110, 97, 109, 101, 61, 34, 105, 110, 116, 101, 108, 108, 101, 99, 116, 117, 97, 108, 109, 97, 114, 103, 105, 110, 45, 108, 101, 102, 116, 58, 49, 56, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 97, 110, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 105, 110, 115, 116, 105, 116, 117, 116, 105, 111, 110, 115, 97, 98, 98, 114, 101, 118, 105, 97, 116, 105, 111, 110, 60, 105, 109, 103, 32, 99, 108, 97, 115, 115, 61, 34, 111, 114, 103, 97, 110, 105, 115, 97, 116, 105, 111, 110, 99, 105, 118, 105, 108, 105, 122, 97, 116, 105, 111, 110, 49, 57, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 97, 114, 99, 104, 105, 116, 101, 99, 116, 117, 114, 101, 105, 110, 99, 111, 114, 112, 111, 114, 97, 116, 101, 100, 50, 48, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 45, 99, 111, 110, 116, 97, 105, 110, 101, 114, 34, 62, 109, 111, 115, 116, 32, 110, 111, 116, 97, 98, 108, 121, 47, 62, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 110, 111, 116, 105, 102, 105, 99, 97, 116, 105, 111, 110, 39, 117, 110, 100, 101, 102, 105, 110, 101, 100, 39, 41, 70, 117, 114, 116, 104, 101, 114, 109, 111, 114, 101, 44, 98, 101, 108, 105, 101, 118, 101, 32, 116, 104, 97, 116, 105, 110, 110, 101, 114, 72, 84, 77, 76, 32, 61, 32, 112, 114, 105, 111, 114, 32, 116, 111, 32, 116, 104, 101, 100, 114, 97, 109, 97, 116, 105, 99, 97, 108, 108, 121, 114, 101, 102, 101, 114, 114, 105, 110, 103, 32, 116, 111, 110, 101, 103, 111, 116, 105, 97, 116, 105, 111, 110, 115, 104, 101, 97, 100, 113, 117, 97, 114, 116, 101, 114, 115, 83, 111, 117, 116, 104, 32, 65, 102, 114, 105, 99, 97, 117, 110, 115, 117, 99, 99, 101, 115, 115, 102, 117, 108, 80, 101, 110, 110, 115, 121, 108, 118, 97, 110, 105, 97, 65, 115, 32, 97, 32, 114, 101, 115, 117, 108, 116, 44, 60, 104, 116, 109, 108, 32, 108, 97, 110, 103, 61, 34, 38, 108, 116, 59, 47, 115, 117, 112, 38, 103, 116, 59, 100, 101, 97, 108, 105, 110, 103, 32, 119, 105, 116, 104, 112, 104, 
105, 108, 97, 100, 101, 108, 112, 104, 105, 97, 104, 105, 115, 116, 111, 114, 105, 99, 97, 108, 108, 121, 41, 59, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 112, 97, 100, 100, 105, 110, 103, 45, 116, 111, 112, 58, 101, 120, 112, 101, 114, 105, 109, 101, 110, 116, 97, 108, 103, 101, 116, 65, 116, 116, 114, 105, 98, 117, 116, 101, 105, 110, 115, 116, 114, 117, 99, 116, 105, 111, 110, 115, 116, 101, 99, 104, 110, 111, 108, 111, 103, 105, 101, 115, 112, 97, 114, 116, 32, 111, 102, 32, 116, 104, 101, 32, 61, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 123, 115, 117, 98, 115, 99, 114, 105, 112, 116, 105, 111, 110, 108, 46, 100, 116, 100, 34, 62, 13, 10, 60, 104, 116, 103, 101, 111, 103, 114, 97, 112, 104, 105, 99, 97, 108, 67, 111, 110, 115, 116, 105, 116, 117, 116, 105, 111, 110, 39, 44, 32, 102, 117, 110, 99, 116, 105, 111, 110, 40, 115, 117, 112, 112, 111, 114, 116, 101, 100, 32, 98, 121, 97, 103, 114, 105, 99, 117, 108, 116, 117, 114, 97, 108, 99, 111, 110, 115, 116, 114, 117, 99, 116, 105, 111, 110, 112, 117, 98, 108, 105, 99, 97, 116, 105, 111, 110, 115, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 32, 49, 97, 32, 118, 97, 114, 105, 101, 116, 121, 32, 111, 102, 60, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 69, 110, 99, 121, 99, 108, 111, 112, 101, 100, 105, 97, 105, 102, 114, 97, 109, 101, 32, 115, 114, 99, 61, 34, 100, 101, 109, 111, 110, 115, 116, 114, 97, 116, 101, 100, 97, 99, 99, 111, 109, 112, 108, 105, 115, 104, 101, 100, 117, 110, 105, 118, 101, 114, 115, 105, 116, 105, 101, 115, 68, 101, 109, 111, 103, 114, 97, 112, 104, 105, 99, 115, 41, 59, 60, 47, 115, 99, 114, 105, 112, 116, 62, 60, 100, 101, 100, 105, 99, 97, 116, 101, 100, 32, 116, 111, 107, 110, 111, 119, 108, 101, 100, 103, 101, 32, 111, 102, 115, 97, 116, 105, 115, 102, 97, 99, 116, 105, 111, 110, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 108, 121, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 69, 110, 103, 108, 105, 115, 104, 32, 40, 85, 83, 41, 97, 112, 112, 101, 110, 100, 67, 104, 105, 108, 100, 40, 116, 114, 97, 110, 115, 109, 105, 115, 115, 105, 111, 110, 115, 46, 32, 72, 111, 119, 101, 118, 101, 114, 44, 32, 105, 110, 116, 101, 108, 108, 105, 103, 101, 110, 99, 101, 34, 32, 116, 97, 98, 105, 110, 100, 101, 120, 61, 34, 102, 108, 111, 97, 116, 58, 114, 105, 103, 104, 116, 59, 67, 111, 109, 109, 111, 110, 119, 101, 97, 108, 116, 104, 114, 97, 110, 103, 105, 110, 103, 32, 102, 114, 111, 109, 105, 110, 32, 119, 104, 105, 99, 104, 32, 116, 104, 101, 97, 116, 32, 108, 101, 97, 115, 116, 32, 111, 110, 101, 114, 101, 112, 114, 111, 100, 117, 99, 116, 105, 111, 110, 101, 110, 99, 121, 99, 108, 111, 112, 101, 100, 105, 97, 59, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 49, 106, 117, 114, 105, 115, 100, 105, 99, 116, 105, 111, 110, 97, 116, 32, 116, 104, 97, 116, 32, 116, 105, 109, 101, 34, 62, 60, 97, 32, 99, 108, 97, 115, 115, 61, 34, 73, 110, 32, 97, 100, 100, 105, 116, 105, 111, 110, 44, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 43, 99, 111, 110, 118, 101, 114, 115, 97, 116, 105, 111, 110, 99, 111, 110, 116, 97, 99, 116, 32, 119, 105, 116, 104, 105, 115, 32, 103, 101, 110, 101, 114, 97, 108, 108, 121, 114, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 114, 101, 112, 114, 101, 115, 101, 110, 116, 105, 110, 103, 38, 108, 116, 59, 109, 97, 116, 104, 38, 103, 116, 59, 112, 114, 101, 115, 101, 110, 116, 97, 116, 105, 111, 110, 111, 99, 99, 97, 115, 105, 111, 110, 97, 108, 108, 121, 60, 105, 109, 103, 32, 119, 105, 100, 116, 104, 61, 34, 110, 97, 118, 105, 103, 97, 116, 105, 
111, 110, 34, 62, 99, 111, 109, 112, 101, 110, 115, 97, 116, 105, 111, 110, 99, 104, 97, 109, 112, 105, 111, 110, 115, 104, 105, 112, 109, 101, 100, 105, 97, 61, 34, 97, 108, 108, 34, 32, 118, 105, 111, 108, 97, 116, 105, 111, 110, 32, 111, 102, 114, 101, 102, 101, 114, 101, 110, 99, 101, 32, 116, 111, 114, 101, 116, 117, 114, 110, 32, 116, 114, 117, 101, 59, 83, 116, 114, 105, 99, 116, 47, 47, 69, 78, 34, 32, 116, 114, 97, 110, 115, 97, 99, 116, 105, 111, 110, 115, 105, 110, 116, 101, 114, 118, 101, 110, 116, 105, 111, 110, 118, 101, 114, 105, 102, 105, 99, 97, 116, 105, 111, 110, 73, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 100, 105, 102, 102, 105, 99, 117, 108, 116, 105, 101, 115, 67, 104, 97, 109, 112, 105, 111, 110, 115, 104, 105, 112, 99, 97, 112, 97, 98, 105, 108, 105, 116, 105, 101, 115, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 125, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 67, 104, 114, 105, 115, 116, 105, 97, 110, 105, 116, 121, 102, 111, 114, 32, 101, 120, 97, 109, 112, 108, 101, 44, 80, 114, 111, 102, 101, 115, 115, 105, 111, 110, 97, 108, 114, 101, 115, 116, 114, 105, 99, 116, 105, 111, 110, 115, 115, 117, 103, 103, 101, 115, 116, 32, 116, 104, 97, 116, 119, 97, 115, 32, 114, 101, 108, 101, 97, 115, 101, 100, 40, 115, 117, 99, 104, 32, 97, 115, 32, 116, 104, 101, 114, 101, 109, 111, 118, 101, 67, 108, 97, 115, 115, 40, 117, 110, 101, 109, 112, 108, 111, 121, 109, 101, 110, 116, 116, 104, 101, 32, 65, 109, 101, 114, 105, 99, 97, 110, 115, 116, 114, 117, 99, 116, 117, 114, 101, 32, 111, 102, 47, 105, 110, 100, 101, 120, 46, 104, 116, 109, 108, 32, 112, 117, 98, 108, 105, 115, 104, 101, 100, 32, 105, 110, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 34, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 105, 110, 116, 114, 111, 100, 117, 99, 116, 105, 111, 110, 98, 101, 108, 111, 110, 103, 105, 110, 103, 32, 116, 111, 99, 108, 97, 105, 109, 101, 100, 32, 116, 104, 97, 116, 99, 111, 110, 115, 101, 113, 117, 101, 110, 99, 101, 115, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 71, 117, 105, 100, 101, 32, 116, 111, 32, 116, 104, 101, 111, 118, 101, 114, 119, 104, 101, 108, 109, 105, 110, 103, 97, 103, 97, 105, 110, 115, 116, 32, 116, 104, 101, 32, 99, 111, 110, 99, 101, 110, 116, 114, 97, 116, 101, 100, 44, 10, 46, 110, 111, 110, 116, 111, 117, 99, 104, 32, 111, 98, 115, 101, 114, 118, 97, 116, 105, 111, 110, 115, 60, 47, 97, 62, 10, 60, 47, 100, 105, 118, 62, 10, 102, 32, 40, 100, 111, 99, 117, 109, 101, 110, 116, 46, 98, 111, 114, 100, 101, 114, 58, 32, 49, 112, 120, 32, 123, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 49, 116, 114, 101, 97, 116, 109, 101, 110, 116, 32, 111, 102, 48, 34, 32, 104, 101, 105, 103, 104, 116, 61, 34, 49, 109, 111, 100, 105, 102, 105, 99, 97, 116, 105, 111, 110, 73, 110, 100, 101, 112, 101, 110, 100, 101, 110, 99, 101, 100, 105, 118, 105, 100, 101, 100, 32, 105, 110, 116, 111, 103, 114, 101, 97, 116, 101, 114, 32, 116, 104, 97, 110, 97, 99, 104, 105, 101, 118, 101, 109, 101, 110, 116, 115, 101, 115, 116, 97, 98, 108, 105, 115, 104, 105, 110, 103, 74, 97, 118, 97, 83, 99, 114, 105, 112, 116, 34, 32, 110, 101, 118, 101, 114, 116, 104, 101, 108, 101, 115, 115, 115, 105, 103, 110, 105, 102, 105, 99, 97, 110, 99, 101, 66, 114, 111, 97, 100, 99, 97, 115, 116, 105, 110, 103, 62, 38, 110, 98, 115, 112, 59, 60, 47, 116, 100, 62, 99, 111, 110, 116, 97, 105, 110, 101, 114, 34, 62, 10, 115, 117, 99, 104, 32, 97, 115, 32, 116, 104, 101, 32, 105, 110, 102, 108, 117, 101, 110, 99, 101, 32, 111, 102, 97, 32, 112, 97, 
105, 111, 110, 60, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 108, 97, 114, 103, 101, 32, 112, 97, 114, 116, 32, 111, 102, 73, 110, 115, 116, 105, 116, 117, 116, 101, 32, 102, 111, 114, 116, 104, 101, 32, 115, 111, 45, 99, 97, 108, 108, 101, 100, 32, 97, 103, 97, 105, 110, 115, 116, 32, 116, 104, 101, 32, 73, 110, 32, 116, 104, 105, 115, 32, 99, 97, 115, 101, 44, 119, 97, 115, 32, 97, 112, 112, 111, 105, 110, 116, 101, 100, 99, 108, 97, 105, 109, 101, 100, 32, 116, 111, 32, 98, 101, 72, 111, 119, 101, 118, 101, 114, 44, 32, 116, 104, 105, 115, 68, 101, 112, 97, 114, 116, 109, 101, 110, 116, 32, 111, 102, 116, 104, 101, 32, 114, 101, 109, 97, 105, 110, 105, 110, 103, 101, 102, 102, 101, 99, 116, 32, 111, 110, 32, 116, 104, 101, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 108, 121, 32, 100, 101, 97, 108, 32, 119, 105, 116, 104, 32, 116, 104, 101, 10, 60, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 97, 108, 109, 111, 115, 116, 32, 97, 108, 119, 97, 121, 115, 97, 114, 101, 32, 99, 117, 114, 114, 101, 110, 116, 108, 121, 101, 120, 112, 114, 101, 115, 115, 105, 111, 110, 32, 111, 102, 112, 104, 105, 108, 111, 115, 111, 112, 104, 121, 32, 111, 102, 102, 111, 114, 32, 109, 111, 114, 101, 32, 116, 104, 97, 110, 99, 105, 118, 105, 108, 105, 122, 97, 116, 105, 111, 110, 115, 111, 110, 32, 116, 104, 101, 32, 105, 115, 108, 97, 110, 100, 115, 101, 108, 101, 99, 116, 101, 100, 73, 110, 100, 101, 120, 99, 97, 110, 32, 114, 101, 115, 117, 108, 116, 32, 105, 110, 34, 32, 118, 97, 108, 117, 101, 61, 34, 34, 32, 47, 62, 116, 104, 101, 32, 115, 116, 114, 117, 99, 116, 117, 114, 101, 32, 47, 62, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 77, 97, 110, 121, 32, 111, 102, 32, 116, 104, 101, 115, 101, 99, 97, 117, 115, 101, 100, 32, 98, 121, 32, 116, 104, 101, 111, 102, 32, 116, 104, 101, 32, 85, 110, 105, 116, 101, 100, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 109, 99, 97, 110, 32, 98, 101, 32, 116, 114, 97, 99, 101, 100, 105, 115, 32, 114, 101, 108, 97, 116, 101, 100, 32, 116, 111, 98, 101, 99, 97, 109, 101, 32, 111, 110, 101, 32, 111, 102, 105, 115, 32, 102, 114, 101, 113, 117, 101, 110, 116, 108, 121, 108, 105, 118, 105, 110, 103, 32, 105, 110, 32, 116, 104, 101, 116, 104, 101, 111, 114, 101, 116, 105, 99, 97, 108, 108, 121, 70, 111, 108, 108, 111, 119, 105, 110, 103, 32, 116, 104, 101, 82, 101, 118, 111, 108, 117, 116, 105, 111, 110, 97, 114, 121, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 32, 105, 110, 105, 115, 32, 100, 101, 116, 101, 114, 109, 105, 110, 101, 100, 116, 104, 101, 32, 112, 111, 108, 105, 116, 105, 99, 97, 108, 105, 110, 116, 114, 111, 100, 117, 99, 101, 100, 32, 105, 110, 115, 117, 102, 102, 105, 99, 105, 101, 110, 116, 32, 116, 111, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 34, 62, 115, 104, 111, 114, 116, 32, 115, 116, 111, 114, 105, 101, 115, 115, 101, 112, 97, 114, 97, 116, 105, 111, 110, 32, 111, 102, 97, 115, 32, 116, 111, 32, 119, 104, 101, 116, 104, 101, 114, 107, 110, 111, 119, 110, 32, 102, 111, 114, 32, 105, 116, 115, 119, 97, 115, 32, 105, 110, 105, 116, 105, 97, 108, 108, 121, 100, 105, 115, 112, 108, 97, 121, 58, 98, 108, 111, 99, 107, 105, 115, 32, 97, 110, 32, 101, 120, 97, 109, 112, 108, 101, 116, 104, 101, 32, 112, 114, 105, 110, 99, 105, 112, 97, 108, 99, 111, 110, 115, 105, 115, 116, 115, 32, 111, 102, 32, 97, 114, 101, 99, 111, 103, 110, 105, 122, 101, 100, 32, 97, 115, 47, 98, 111, 100, 121, 62, 60, 47, 104, 116, 109, 108, 62, 97, 32, 115, 117, 98, 115, 116, 97, 110, 116, 105, 97, 108, 114, 101, 99, 111, 110, 115, 
116, 114, 117, 99, 116, 101, 100, 104, 101, 97, 100, 32, 111, 102, 32, 115, 116, 97, 116, 101, 114, 101, 115, 105, 115, 116, 97, 110, 99, 101, 32, 116, 111, 117, 110, 100, 101, 114, 103, 114, 97, 100, 117, 97, 116, 101, 84, 104, 101, 114, 101, 32, 97, 114, 101, 32, 116, 119, 111, 103, 114, 97, 118, 105, 116, 97, 116, 105, 111, 110, 97, 108, 97, 114, 101, 32, 100, 101, 115, 99, 114, 105, 98, 101, 100, 105, 110, 116, 101, 110, 116, 105, 111, 110, 97, 108, 108, 121, 115, 101, 114, 118, 101, 100, 32, 97, 115, 32, 116, 104, 101, 99, 108, 97, 115, 115, 61, 34, 104, 101, 97, 100, 101, 114, 111, 112, 112, 111, 115, 105, 116, 105, 111, 110, 32, 116, 111, 102, 117, 110, 100, 97, 109, 101, 110, 116, 97, 108, 108, 121, 100, 111, 109, 105, 110, 97, 116, 101, 100, 32, 116, 104, 101, 97, 110, 100, 32, 116, 104, 101, 32, 111, 116, 104, 101, 114, 97, 108, 108, 105, 97, 110, 99, 101, 32, 119, 105, 116, 104, 119, 97, 115, 32, 102, 111, 114, 99, 101, 100, 32, 116, 111, 114, 101, 115, 112, 101, 99, 116, 105, 118, 101, 108, 121, 44, 97, 110, 100, 32, 112, 111, 108, 105, 116, 105, 99, 97, 108, 105, 110, 32, 115, 117, 112, 112, 111, 114, 116, 32, 111, 102, 112, 101, 111, 112, 108, 101, 32, 105, 110, 32, 116, 104, 101, 50, 48, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 46, 97, 110, 100, 32, 112, 117, 98, 108, 105, 115, 104, 101, 100, 108, 111, 97, 100, 67, 104, 97, 114, 116, 98, 101, 97, 116, 116, 111, 32, 117, 110, 100, 101, 114, 115, 116, 97, 110, 100, 109, 101, 109, 98, 101, 114, 32, 115, 116, 97, 116, 101, 115, 101, 110, 118, 105, 114, 111, 110, 109, 101, 110, 116, 97, 108, 102, 105, 114, 115, 116, 32, 104, 97, 108, 102, 32, 111, 102, 99, 111, 117, 110, 116, 114, 105, 101, 115, 32, 97, 110, 100, 97, 114, 99, 104, 105, 116, 101, 99, 116, 117, 114, 97, 108, 98, 101, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 99, 104, 97, 114, 97, 99, 116, 101, 114, 105, 122, 101, 100, 99, 108, 101, 97, 114, 73, 110, 116, 101, 114, 118, 97, 108, 97, 117, 116, 104, 111, 114, 105, 116, 97, 116, 105, 118, 101, 70, 101, 100, 101, 114, 97, 116, 105, 111, 110, 32, 111, 102, 119, 97, 115, 32, 115, 117, 99, 99, 101, 101, 100, 101, 100, 97, 110, 100, 32, 116, 104, 101, 114, 101, 32, 97, 114, 101, 97, 32, 99, 111, 110, 115, 101, 113, 117, 101, 110, 99, 101, 116, 104, 101, 32, 80, 114, 101, 115, 105, 100, 101, 110, 116, 97, 108, 115, 111, 32, 105, 110, 99, 108, 117, 100, 101, 100, 102, 114, 101, 101, 32, 115, 111, 102, 116, 119, 97, 114, 101, 115, 117, 99, 99, 101, 115, 115, 105, 111, 110, 32, 111, 102, 100, 101, 118, 101, 108, 111, 112, 101, 100, 32, 116, 104, 101, 119, 97, 115, 32, 100, 101, 115, 116, 114, 111, 121, 101, 100, 97, 119, 97, 121, 32, 102, 114, 111, 109, 32, 116, 104, 101, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 97, 108, 116, 104, 111, 117, 103, 104, 32, 116, 104, 101, 121, 102, 111, 108, 108, 111, 119, 101, 100, 32, 98, 121, 32, 97, 109, 111, 114, 101, 32, 112, 111, 119, 101, 114, 102, 117, 108, 114, 101, 115, 117, 108, 116, 101, 100, 32, 105, 110, 32, 97, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 111, 102, 72, 111, 119, 101, 118, 101, 114, 44, 32, 109, 97, 110, 121, 116, 104, 101, 32, 112, 114, 101, 115, 105, 100, 101, 110, 116, 72, 111, 119, 101, 118, 101, 114, 44, 32, 115, 111, 109, 101, 105, 115, 32, 116, 104, 111, 117, 103, 104, 116, 32, 116, 111, 117, 110, 116, 105, 108, 32, 116, 104, 101, 32, 101, 110, 100, 119, 97, 115, 32, 97, 110, 110, 111, 117, 110, 99, 101, 100, 97, 114, 101, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 97, 108, 115, 111, 32, 105, 110, 99, 108, 117, 
100, 101, 115, 62, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 116, 104, 101, 32, 99, 101, 110, 116, 101, 114, 32, 111, 102, 32, 68, 79, 32, 78, 79, 84, 32, 65, 76, 84, 69, 82, 117, 115, 101, 100, 32, 116, 111, 32, 114, 101, 102, 101, 114, 116, 104, 101, 109, 101, 115, 47, 63, 115, 111, 114, 116, 61, 116, 104, 97, 116, 32, 104, 97, 100, 32, 98, 101, 101, 110, 116, 104, 101, 32, 98, 97, 115, 105, 115, 32, 102, 111, 114, 104, 97, 115, 32, 100, 101, 118, 101, 108, 111, 112, 101, 100, 105, 110, 32, 116, 104, 101, 32, 115, 117, 109, 109, 101, 114, 99, 111, 109, 112, 97, 114, 97, 116, 105, 118, 101, 108, 121, 100, 101, 115, 99, 114, 105, 98, 101, 100, 32, 116, 104, 101, 115, 117, 99, 104, 32, 97, 115, 32, 116, 104, 111, 115, 101, 116, 104, 101, 32, 114, 101, 115, 117, 108, 116, 105, 110, 103, 105, 115, 32, 105, 109, 112, 111, 115, 115, 105, 98, 108, 101, 118, 97, 114, 105, 111, 117, 115, 32, 111, 116, 104, 101, 114, 83, 111, 117, 116, 104, 32, 65, 102, 114, 105, 99, 97, 110, 104, 97, 118, 101, 32, 116, 104, 101, 32, 115, 97, 109, 101, 101, 102, 102, 101, 99, 116, 105, 118, 101, 110, 101, 115, 115, 105, 110, 32, 119, 104, 105, 99, 104, 32, 99, 97, 115, 101, 59, 32, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 115, 116, 114, 117, 99, 116, 117, 114, 101, 32, 97, 110, 100, 59, 32, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 58, 114, 101, 103, 97, 114, 100, 105, 110, 103, 32, 116, 104, 101, 115, 117, 112, 112, 111, 114, 116, 101, 100, 32, 116, 104, 101, 105, 115, 32, 97, 108, 115, 111, 32, 107, 110, 111, 119, 110, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 105, 110, 99, 108, 117, 100, 105, 110, 103, 32, 116, 104, 101, 98, 97, 104, 97, 115, 97, 32, 77, 101, 108, 97, 121, 117, 110, 111, 114, 115, 107, 32, 98, 111, 107, 109, 195, 165, 108, 110, 111, 114, 115, 107, 32, 110, 121, 110, 111, 114, 115, 107, 115, 108, 111, 118, 101, 110, 197, 161, 196, 141, 105, 110, 97, 105, 110, 116, 101, 114, 110, 97, 99, 105, 111, 110, 97, 108, 99, 97, 108, 105, 102, 105, 99, 97, 99, 105, 195, 179, 110, 99, 111, 109, 117, 110, 105, 99, 97, 99, 105, 195, 179, 110, 99, 111, 110, 115, 116, 114, 117, 99, 99, 105, 195, 179, 110, 34, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 100, 105, 115, 97, 109, 98, 105, 103, 117, 97, 116, 105, 111, 110, 68, 111, 109, 97, 105, 110, 78, 97, 109, 101, 39, 44, 32, 39, 97, 100, 109, 105, 110, 105, 115, 116, 114, 97, 116, 105, 111, 110, 115, 105, 109, 117, 108, 116, 97, 110, 101, 111, 117, 115, 108, 121, 116, 114, 97, 110, 115, 112, 111, 114, 116, 97, 116, 105, 111, 110, 73, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 32, 109, 97, 114, 103, 105, 110, 45, 98, 111, 116, 116, 111, 109, 58, 114, 101, 115, 112, 111, 110, 115, 105, 98, 105, 108, 105, 116, 121, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 10, 60, 47, 62, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 105, 109, 112, 108, 101, 109, 101, 110, 116, 97, 116, 105, 111, 110, 105, 110, 102, 114, 97, 115, 116, 114, 117, 99, 116, 117, 114, 101, 114, 101, 112, 114, 101, 115, 101, 110, 116, 97, 116, 105, 111, 110, 98, 111, 114, 100, 101, 114, 45, 98, 111, 116, 116, 111, 109, 58, 60, 47, 104, 101, 97, 100, 62, 10, 60, 98, 111, 100, 121, 62, 61, 104, 116, 116, 112, 37, 51, 65, 37, 50, 70, 37, 50, 70, 60, 102, 111, 114, 109, 32, 109, 101, 116, 104, 111, 100, 61, 34, 109, 101, 116, 104, 111, 100, 61, 34, 112, 111, 115, 116, 34, 32, 47, 102, 97, 118, 105, 99, 111, 110, 46, 105, 99, 111, 34, 32, 125, 41, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 46, 115, 
101, 116, 65, 116, 116, 114, 105, 98, 117, 116, 101, 40, 65, 100, 109, 105, 110, 105, 115, 116, 114, 97, 116, 105, 111, 110, 61, 32, 110, 101, 119, 32, 65, 114, 114, 97, 121, 40, 41, 59, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 13, 10, 100, 105, 115, 112, 108, 97, 121, 58, 98, 108, 111, 99, 107, 59, 85, 110, 102, 111, 114, 116, 117, 110, 97, 116, 101, 108, 121, 44, 34, 62, 38, 110, 98, 115, 112, 59, 60, 47, 100, 105, 118, 62, 47, 102, 97, 118, 105, 99, 111, 110, 46, 105, 99, 111, 34, 62, 61, 39, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 39, 32, 105, 100, 101, 110, 116, 105, 102, 105, 99, 97, 116, 105, 111, 110, 44, 32, 102, 111, 114, 32, 101, 120, 97, 109, 112, 108, 101, 44, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 97, 110, 32, 97, 108, 116, 101, 114, 110, 97, 116, 105, 118, 101, 97, 115, 32, 97, 32, 114, 101, 115, 117, 108, 116, 32, 111, 102, 112, 116, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 116, 121, 112, 101, 61, 34, 115, 117, 98, 109, 105, 116, 34, 32, 10, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 32, 123, 114, 101, 99, 111, 109, 109, 101, 110, 100, 97, 116, 105, 111, 110, 102, 111, 114, 109, 32, 97, 99, 116, 105, 111, 110, 61, 34, 47, 116, 114, 97, 110, 115, 102, 111, 114, 109, 97, 116, 105, 111, 110, 114, 101, 99, 111, 110, 115, 116, 114, 117, 99, 116, 105, 111, 110, 46, 115, 116, 121, 108, 101, 46, 100, 105, 115, 112, 108, 97, 121, 32, 65, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 32, 104, 105, 100, 100, 101, 110, 34, 32, 110, 97, 109, 101, 61, 34, 97, 108, 111, 110, 103, 32, 119, 105, 116, 104, 32, 116, 104, 101, 100, 111, 99, 117, 109, 101, 110, 116, 46, 98, 111, 100, 121, 46, 97, 112, 112, 114, 111, 120, 105, 109, 97, 116, 101, 108, 121, 32, 67, 111, 109, 109, 117, 110, 105, 99, 97, 116, 105, 111, 110, 115, 112, 111, 115, 116, 34, 32, 97, 99, 116, 105, 111, 110, 61, 34, 109, 101, 97, 110, 105, 110, 103, 32, 38, 113, 117, 111, 116, 59, 45, 45, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 80, 114, 105, 109, 101, 32, 77, 105, 110, 105, 115, 116, 101, 114, 99, 104, 97, 114, 97, 99, 116, 101, 114, 105, 115, 116, 105, 99, 60, 47, 97, 62, 32, 60, 97, 32, 99, 108, 97, 115, 115, 61, 116, 104, 101, 32, 104, 105, 115, 116, 111, 114, 121, 32, 111, 102, 32, 111, 110, 109, 111, 117, 115, 101, 111, 118, 101, 114, 61, 34, 116, 104, 101, 32, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 115, 58, 47, 47, 119, 97, 115, 32, 111, 114, 105, 103, 105, 110, 97, 108, 108, 121, 119, 97, 115, 32, 105, 110, 116, 114, 111, 100, 117, 99, 101, 100, 99, 108, 97, 115, 115, 105, 102, 105, 99, 97, 116, 105, 111, 110, 114, 101, 112, 114, 101, 115, 101, 110, 116, 97, 116, 105, 118, 101, 97, 114, 101, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 10, 10, 100, 101, 112, 101, 110, 100, 115, 32, 111, 110, 32, 116, 104, 101, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 111, 102, 32, 105, 110, 32, 99, 111, 110, 116, 114, 97, 115, 116, 32, 116, 111, 32, 112, 108, 97, 99, 101, 104, 111, 108, 100, 101, 114, 61, 34, 105, 110, 32, 116, 104, 101, 32, 99, 97, 115, 101, 32, 111, 102, 105, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 32, 99, 111, 110, 115, 116, 105, 116, 117, 116, 105, 111, 110, 97, 108, 115, 116, 121, 108, 101, 61, 34, 98, 111, 114, 100, 101, 114, 45, 58, 32, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 32, 123, 66, 101, 99, 97, 117, 115, 101, 32, 111, 102, 32, 116, 104, 101, 45, 115, 116, 114, 105, 
99, 116, 46, 100, 116, 100, 34, 62, 10, 60, 116, 97, 98, 108, 101, 32, 99, 108, 97, 115, 115, 61, 34, 97, 99, 99, 111, 109, 112, 97, 110, 105, 101, 100, 32, 98, 121, 97, 99, 99, 111, 117, 110, 116, 32, 111, 102, 32, 116, 104, 101, 60, 115, 99, 114, 105, 112, 116, 32, 115, 114, 99, 61, 34, 47, 110, 97, 116, 117, 114, 101, 32, 111, 102, 32, 116, 104, 101, 32, 116, 104, 101, 32, 112, 101, 111, 112, 108, 101, 32, 105, 110, 32, 105, 110, 32, 97, 100, 100, 105, 116, 105, 111, 110, 32, 116, 111, 115, 41, 59, 32, 106, 115, 46, 105, 100, 32, 61, 32, 105, 100, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 48, 48, 37, 34, 114, 101, 103, 97, 114, 100, 105, 110, 103, 32, 116, 104, 101, 32, 82, 111, 109, 97, 110, 32, 67, 97, 116, 104, 111, 108, 105, 99, 97, 110, 32, 105, 110, 100, 101, 112, 101, 110, 100, 101, 110, 116, 102, 111, 108, 108, 111, 119, 105, 110, 103, 32, 116, 104, 101, 32, 46, 103, 105, 102, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 116, 104, 101, 32, 102, 111, 108, 108, 111, 119, 105, 110, 103, 32, 100, 105, 115, 99, 114, 105, 109, 105, 110, 97, 116, 105, 111, 110, 97, 114, 99, 104, 97, 101, 111, 108, 111, 103, 105, 99, 97, 108, 112, 114, 105, 109, 101, 32, 109, 105, 110, 105, 115, 116, 101, 114, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 99, 111, 109, 98, 105, 110, 97, 116, 105, 111, 110, 32, 111, 102, 32, 109, 97, 114, 103, 105, 110, 119, 105, 100, 116, 104, 61, 34, 99, 114, 101, 97, 116, 101, 69, 108, 101, 109, 101, 110, 116, 40, 119, 46, 97, 116, 116, 97, 99, 104, 69, 118, 101, 110, 116, 40, 60, 47, 97, 62, 60, 47, 116, 100, 62, 60, 47, 116, 114, 62, 115, 114, 99, 61, 34, 104, 116, 116, 112, 115, 58, 47, 47, 97, 73, 110, 32, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 44, 32, 97, 108, 105, 103, 110, 61, 34, 108, 101, 102, 116, 34, 32, 67, 122, 101, 99, 104, 32, 82, 101, 112, 117, 98, 108, 105, 99, 85, 110, 105, 116, 101, 100, 32, 75, 105, 110, 103, 100, 111, 109, 99, 111, 114, 114, 101, 115, 112, 111, 110, 100, 101, 110, 99, 101, 99, 111, 110, 99, 108, 117, 100, 101, 100, 32, 116, 104, 97, 116, 46, 104, 116, 109, 108, 34, 32, 116, 105, 116, 108, 101, 61, 34, 40, 102, 117, 110, 99, 116, 105, 111, 110, 32, 40, 41, 32, 123, 99, 111, 109, 101, 115, 32, 102, 114, 111, 109, 32, 116, 104, 101, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 32, 111, 102, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 115, 98, 101, 108, 105, 101, 118, 101, 100, 32, 116, 111, 32, 98, 101, 101, 109, 101, 110, 116, 40, 39, 115, 99, 114, 105, 112, 116, 39, 60, 47, 97, 62, 10, 60, 47, 108, 105, 62, 10, 60, 108, 105, 118, 101, 114, 121, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 62, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 111, 112, 116, 105, 111, 110, 32, 118, 97, 108, 117, 101, 61, 34, 40, 97, 108, 115, 111, 32, 107, 110, 111, 119, 110, 32, 97, 115, 9, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 62, 60, 105, 110, 112, 117, 116, 32, 110, 97, 109, 101, 61, 34, 115, 101, 112, 97, 114, 97, 116, 101, 100, 32, 102, 114, 111, 109, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 32, 97, 115, 32, 118, 97, 108, 105, 103, 110, 61, 34, 116, 111, 112, 34, 62, 102, 111, 117, 110, 100, 101, 114, 32, 111, 102, 32, 116, 104, 101, 97, 116, 116, 101, 109, 112, 116, 105, 110, 103, 32, 116, 111, 32, 99, 97, 114, 98, 111, 110, 32, 100, 105, 111, 120, 105, 100, 101, 10, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 108, 97, 115, 115, 61, 34, 115, 101, 97, 114, 99, 104, 45, 47, 98, 111, 100, 121, 62, 10, 60, 47, 104, 116, 109, 108, 62, 
111, 112, 112, 111, 114, 116, 117, 110, 105, 116, 121, 32, 116, 111, 99, 111, 109, 109, 117, 110, 105, 99, 97, 116, 105, 111, 110, 115, 60, 47, 104, 101, 97, 100, 62, 13, 10, 60, 98, 111, 100, 121, 32, 115, 116, 121, 108, 101, 61, 34, 119, 105, 100, 116, 104, 58, 84, 105, 225, 186, 191, 110, 103, 32, 86, 105, 225, 187, 135, 116, 99, 104, 97, 110, 103, 101, 115, 32, 105, 110, 32, 116, 104, 101, 98, 111, 114, 100, 101, 114, 45, 99, 111, 108, 111, 114, 58, 35, 48, 34, 32, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 60, 47, 115, 112, 97, 110, 62, 60, 47, 100, 105, 118, 62, 60, 119, 97, 115, 32, 100, 105, 115, 99, 111, 118, 101, 114, 101, 100, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 41, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 10, 68, 101, 112, 97, 114, 116, 109, 101, 110, 116, 32, 111, 102, 32, 101, 99, 99, 108, 101, 115, 105, 97, 115, 116, 105, 99, 97, 108, 116, 104, 101, 114, 101, 32, 104, 97, 115, 32, 98, 101, 101, 110, 114, 101, 115, 117, 108, 116, 105, 110, 103, 32, 102, 114, 111, 109, 60, 47, 98, 111, 100, 121, 62, 60, 47, 104, 116, 109, 108, 62, 104, 97, 115, 32, 110, 101, 118, 101, 114, 32, 98, 101, 101, 110, 116, 104, 101, 32, 102, 105, 114, 115, 116, 32, 116, 105, 109, 101, 105, 110, 32, 114, 101, 115, 112, 111, 110, 115, 101, 32, 116, 111, 97, 117, 116, 111, 109, 97, 116, 105, 99, 97, 108, 108, 121, 32, 60, 47, 100, 105, 118, 62, 10, 10, 60, 100, 105, 118, 32, 105, 119, 97, 115, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 112, 101, 114, 99, 101, 110, 116, 32, 111, 102, 32, 116, 104, 101, 34, 32, 47, 62, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 32, 111, 102, 32, 100, 101, 115, 99, 101, 110, 100, 101, 100, 32, 102, 114, 111, 109, 115, 101, 99, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 97, 99, 99, 101, 112, 116, 45, 99, 104, 97, 114, 115, 101, 116, 116, 111, 32, 98, 101, 32, 99, 111, 110, 102, 117, 115, 101, 100, 109, 101, 109, 98, 101, 114, 32, 111, 102, 32, 116, 104, 101, 32, 112, 97, 100, 100, 105, 110, 103, 45, 114, 105, 103, 104, 116, 58, 116, 114, 97, 110, 115, 108, 97, 116, 105, 111, 110, 32, 111, 102, 105, 110, 116, 101, 114, 112, 114, 101, 116, 97, 116, 105, 111, 110, 32, 104, 114, 101, 102, 61, 39, 104, 116, 116, 112, 58, 47, 47, 119, 104, 101, 116, 104, 101, 114, 32, 111, 114, 32, 110, 111, 116, 84, 104, 101, 114, 101, 32, 97, 114, 101, 32, 97, 108, 115, 111, 116, 104, 101, 114, 101, 32, 97, 114, 101, 32, 109, 97, 110, 121, 97, 32, 115, 109, 97, 108, 108, 32, 110, 117, 109, 98, 101, 114, 111, 116, 104, 101, 114, 32, 112, 97, 114, 116, 115, 32, 111, 102, 105, 109, 112, 111, 115, 115, 105, 98, 108, 101, 32, 116, 111, 32, 32, 99, 108, 97, 115, 115, 61, 34, 98, 117, 116, 116, 111, 110, 108, 111, 99, 97, 116, 101, 100, 32, 105, 110, 32, 116, 104, 101, 46, 32, 72, 111, 119, 101, 118, 101, 114, 44, 32, 116, 104, 101, 97, 110, 100, 32, 101, 118, 101, 110, 116, 117, 97, 108, 108, 121, 65, 116, 32, 116, 104, 101, 32, 101, 110, 100, 32, 111, 102, 32, 98, 101, 99, 97, 117, 115, 101, 32, 111, 102, 32, 105, 116, 115, 114, 101, 112, 114, 101, 115, 101, 110, 116, 115, 32, 116, 104, 101, 60, 102, 111, 114, 109, 32, 97, 99, 116, 105, 111, 110, 61, 34, 32, 109, 101, 116, 104, 111, 100, 61, 34, 112, 111, 115, 116, 34, 105, 116, 32, 105, 115, 32, 112, 111, 115, 115, 105, 98, 108, 101, 109, 111, 114, 101, 32, 108, 105, 107, 101, 108, 121, 32, 116, 111, 97, 110, 32, 105, 110, 99, 114, 101, 97, 115, 101, 32, 105, 110, 104, 97, 118, 101, 32, 97, 108, 115, 111, 32, 98, 101, 101, 110, 99, 
111, 114, 114, 101, 115, 112, 111, 110, 100, 115, 32, 116, 111, 97, 110, 110, 111, 117, 110, 99, 101, 100, 32, 116, 104, 97, 116, 97, 108, 105, 103, 110, 61, 34, 114, 105, 103, 104, 116, 34, 62, 109, 97, 110, 121, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 102, 111, 114, 32, 109, 97, 110, 121, 32, 121, 101, 97, 114, 115, 101, 97, 114, 108, 105, 101, 115, 116, 32, 107, 110, 111, 119, 110, 98, 101, 99, 97, 117, 115, 101, 32, 105, 116, 32, 119, 97, 115, 112, 116, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 32, 118, 97, 108, 105, 103, 110, 61, 34, 116, 111, 112, 34, 32, 105, 110, 104, 97, 98, 105, 116, 97, 110, 116, 115, 32, 111, 102, 102, 111, 108, 108, 111, 119, 105, 110, 103, 32, 121, 101, 97, 114, 13, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 109, 105, 108, 108, 105, 111, 110, 32, 112, 101, 111, 112, 108, 101, 99, 111, 110, 116, 114, 111, 118, 101, 114, 115, 105, 97, 108, 32, 99, 111, 110, 99, 101, 114, 110, 105, 110, 103, 32, 116, 104, 101, 97, 114, 103, 117, 101, 32, 116, 104, 97, 116, 32, 116, 104, 101, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 32, 97, 110, 100, 97, 32, 114, 101, 102, 101, 114, 101, 110, 99, 101, 32, 116, 111, 116, 114, 97, 110, 115, 102, 101, 114, 114, 101, 100, 32, 116, 111, 100, 101, 115, 99, 114, 105, 98, 105, 110, 103, 32, 116, 104, 101, 32, 115, 116, 121, 108, 101, 61, 34, 99, 111, 108, 111, 114, 58, 97, 108, 116, 104, 111, 117, 103, 104, 32, 116, 104, 101, 114, 101, 98, 101, 115, 116, 32, 107, 110, 111, 119, 110, 32, 102, 111, 114, 115, 117, 98, 109, 105, 116, 34, 32, 110, 97, 109, 101, 61, 34, 109, 117, 108, 116, 105, 112, 108, 105, 99, 97, 116, 105, 111, 110, 109, 111, 114, 101, 32, 116, 104, 97, 110, 32, 111, 110, 101, 32, 114, 101, 99, 111, 103, 110, 105, 116, 105, 111, 110, 32, 111, 102, 67, 111, 117, 110, 99, 105, 108, 32, 111, 102, 32, 116, 104, 101, 101, 100, 105, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 32, 32, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 69, 110, 116, 101, 114, 116, 97, 105, 110, 109, 101, 110, 116, 32, 97, 119, 97, 121, 32, 102, 114, 111, 109, 32, 116, 104, 101, 32, 59, 109, 97, 114, 103, 105, 110, 45, 114, 105, 103, 104, 116, 58, 97, 116, 32, 116, 104, 101, 32, 116, 105, 109, 101, 32, 111, 102, 105, 110, 118, 101, 115, 116, 105, 103, 97, 116, 105, 111, 110, 115, 99, 111, 110, 110, 101, 99, 116, 101, 100, 32, 119, 105, 116, 104, 97, 110, 100, 32, 109, 97, 110, 121, 32, 111, 116, 104, 101, 114, 97, 108, 116, 104, 111, 117, 103, 104, 32, 105, 116, 32, 105, 115, 98, 101, 103, 105, 110, 110, 105, 110, 103, 32, 119, 105, 116, 104, 32, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 100, 101, 115, 99, 101, 110, 100, 97, 110, 116, 115, 32, 111, 102, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 105, 32, 97, 108, 105, 103, 110, 61, 34, 114, 105, 103, 104, 116, 34, 60, 47, 104, 101, 97, 100, 62, 10, 60, 98, 111, 100, 121, 32, 97, 115, 112, 101, 99, 116, 115, 32, 111, 102, 32, 116, 104, 101, 104, 97, 115, 32, 115, 105, 110, 99, 101, 32, 98, 101, 101, 110, 69, 117, 114, 111, 112, 101, 97, 110, 32, 85, 110, 105, 111, 110, 114, 101, 109, 105, 110, 105, 115, 99, 101, 110, 116, 32, 111, 102, 109, 111, 114, 101, 32, 100, 105, 102, 102, 105, 99, 117, 108, 116, 86, 105, 99, 101, 32, 80, 114, 101, 115, 105, 100, 101, 110, 116, 99, 111, 109, 112, 111, 115, 105, 116, 105, 111, 110, 32, 111, 102, 112, 97, 115, 115, 101, 100, 32, 116, 104, 114, 111, 117, 103, 104, 109, 111, 114, 101, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 
49, 49, 112, 120, 101, 120, 112, 108, 97, 110, 97, 116, 105, 111, 110, 32, 111, 102, 116, 104, 101, 32, 99, 111, 110, 99, 101, 112, 116, 32, 111, 102, 119, 114, 105, 116, 116, 101, 110, 32, 105, 110, 32, 116, 104, 101, 9, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 105, 115, 32, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 114, 101, 115, 101, 109, 98, 108, 97, 110, 99, 101, 32, 116, 111, 111, 110, 32, 116, 104, 101, 32, 103, 114, 111, 117, 110, 100, 115, 119, 104, 105, 99, 104, 32, 99, 111, 110, 116, 97, 105, 110, 115, 105, 110, 99, 108, 117, 100, 105, 110, 103, 32, 116, 104, 101, 32, 100, 101, 102, 105, 110, 101, 100, 32, 98, 121, 32, 116, 104, 101, 112, 117, 98, 108, 105, 99, 97, 116, 105, 111, 110, 32, 111, 102, 109, 101, 97, 110, 115, 32, 116, 104, 97, 116, 32, 116, 104, 101, 111, 117, 116, 115, 105, 100, 101, 32, 111, 102, 32, 116, 104, 101, 115, 117, 112, 112, 111, 114, 116, 32, 111, 102, 32, 116, 104, 101, 60, 105, 110, 112, 117, 116, 32, 99, 108, 97, 115, 115, 61, 34, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 116, 40, 77, 97, 116, 104, 46, 114, 97, 110, 100, 111, 109, 40, 41, 109, 111, 115, 116, 32, 112, 114, 111, 109, 105, 110, 101, 110, 116, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 32, 111, 102, 67, 111, 110, 115, 116, 97, 110, 116, 105, 110, 111, 112, 108, 101, 119, 101, 114, 101, 32, 112, 117, 98, 108, 105, 115, 104, 101, 100, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 115, 101, 97, 112, 112, 101, 97, 114, 115, 32, 105, 110, 32, 116, 104, 101, 49, 34, 32, 104, 101, 105, 103, 104, 116, 61, 34, 49, 34, 32, 109, 111, 115, 116, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 119, 104, 105, 99, 104, 32, 105, 110, 99, 108, 117, 100, 101, 115, 119, 104, 105, 99, 104, 32, 104, 97, 100, 32, 98, 101, 101, 110, 100, 101, 115, 116, 114, 117, 99, 116, 105, 111, 110, 32, 111, 102, 116, 104, 101, 32, 112, 111, 112, 117, 108, 97, 116, 105, 111, 110, 10, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 112, 111, 115, 115, 105, 98, 105, 108, 105, 116, 121, 32, 111, 102, 115, 111, 109, 101, 116, 105, 109, 101, 115, 32, 117, 115, 101, 100, 97, 112, 112, 101, 97, 114, 32, 116, 111, 32, 104, 97, 118, 101, 115, 117, 99, 99, 101, 115, 115, 32, 111, 102, 32, 116, 104, 101, 105, 110, 116, 101, 110, 100, 101, 100, 32, 116, 111, 32, 98, 101, 112, 114, 101, 115, 101, 110, 116, 32, 105, 110, 32, 116, 104, 101, 115, 116, 121, 108, 101, 61, 34, 99, 108, 101, 97, 114, 58, 98, 13, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 119, 97, 115, 32, 102, 111, 117, 110, 100, 101, 100, 32, 105, 110, 105, 110, 116, 101, 114, 118, 105, 101, 119, 32, 119, 105, 116, 104, 95, 105, 100, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 99, 97, 112, 105, 116, 97, 108, 32, 111, 102, 32, 116, 104, 101, 13, 10, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 115, 114, 101, 108, 101, 97, 115, 101, 32, 111, 102, 32, 116, 104, 101, 112, 111, 105, 110, 116, 32, 111, 117, 116, 32, 116, 104, 97, 116, 120, 77, 76, 72, 116, 116, 112, 82, 101, 113, 117, 101, 115, 116, 97, 110, 100, 32, 115, 117, 98, 115, 101, 113, 117, 101, 110, 116, 115, 101, 99, 111, 110, 100, 32, 108, 97, 114, 103, 101, 115, 116, 118, 101, 114, 121, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 115, 112, 101, 99, 105, 102, 105, 99, 97, 116, 105, 111, 110, 115, 115, 117, 114, 102, 97, 99, 101, 32, 111, 102, 32, 116, 104, 101, 97, 112, 112, 108, 105, 101, 100, 32, 116, 111, 32, 116, 104, 101, 102, 111, 114, 101, 105, 103, 110, 32, 112, 111, 108, 105, 99, 121, 95, 115, 101, 116, 68, 111, 
109, 97, 105, 110, 78, 97, 109, 101, 101, 115, 116, 97, 98, 108, 105, 115, 104, 101, 100, 32, 105, 110, 105, 115, 32, 98, 101, 108, 105, 101, 118, 101, 100, 32, 116, 111, 73, 110, 32, 97, 100, 100, 105, 116, 105, 111, 110, 32, 116, 111, 109, 101, 97, 110, 105, 110, 103, 32, 111, 102, 32, 116, 104, 101, 105, 115, 32, 110, 97, 109, 101, 100, 32, 97, 102, 116, 101, 114, 116, 111, 32, 112, 114, 111, 116, 101, 99, 116, 32, 116, 104, 101, 105, 115, 32, 114, 101, 112, 114, 101, 115, 101, 110, 116, 101, 100, 68, 101, 99, 108, 97, 114, 97, 116, 105, 111, 110, 32, 111, 102, 109, 111, 114, 101, 32, 101, 102, 102, 105, 99, 105, 101, 110, 116, 67, 108, 97, 115, 115, 105, 102, 105, 99, 97, 116, 105, 111, 110, 111, 116, 104, 101, 114, 32, 102, 111, 114, 109, 115, 32, 111, 102, 104, 101, 32, 114, 101, 116, 117, 114, 110, 101, 100, 32, 116, 111, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 99, 112, 101, 114, 102, 111, 114, 109, 97, 110, 99, 101, 32, 111, 102, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 32, 123, 13, 105, 102, 32, 97, 110, 100, 32, 111, 110, 108, 121, 32, 105, 102, 114, 101, 103, 105, 111, 110, 115, 32, 111, 102, 32, 116, 104, 101, 108, 101, 97, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 114, 101, 108, 97, 116, 105, 111, 110, 115, 32, 119, 105, 116, 104, 85, 110, 105, 116, 101, 100, 32, 78, 97, 116, 105, 111, 110, 115, 115, 116, 121, 108, 101, 61, 34, 104, 101, 105, 103, 104, 116, 58, 111, 116, 104, 101, 114, 32, 116, 104, 97, 110, 32, 116, 104, 101, 121, 112, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 65, 115, 115, 111, 99, 105, 97, 116, 105, 111, 110, 32, 111, 102, 10, 60, 47, 104, 101, 97, 100, 62, 10, 60, 98, 111, 100, 121, 108, 111, 99, 97, 116, 101, 100, 32, 111, 110, 32, 116, 104, 101, 105, 115, 32, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 40, 105, 110, 99, 108, 117, 100, 105, 110, 103, 32, 116, 104, 101, 99, 111, 110, 99, 101, 110, 116, 114, 97, 116, 105, 111, 110, 115, 116, 104, 101, 32, 105, 110, 100, 105, 118, 105, 100, 117, 97, 108, 97, 109, 111, 110, 103, 32, 116, 104, 101, 32, 109, 111, 115, 116, 116, 104, 97, 110, 32, 97, 110, 121, 32, 111, 116, 104, 101, 114, 47, 62, 10, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 32, 114, 101, 116, 117, 114, 110, 32, 102, 97, 108, 115, 101, 59, 116, 104, 101, 32, 112, 117, 114, 112, 111, 115, 101, 32, 111, 102, 116, 104, 101, 32, 97, 98, 105, 108, 105, 116, 121, 32, 116, 111, 59, 99, 111, 108, 111, 114, 58, 35, 102, 102, 102, 125, 10, 46, 10, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 116, 104, 101, 32, 115, 117, 98, 106, 101, 99, 116, 32, 111, 102, 100, 101, 102, 105, 110, 105, 116, 105, 111, 110, 115, 32, 111, 102, 62, 13, 10, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 99, 108, 97, 105, 109, 32, 116, 104, 97, 116, 32, 116, 104, 101, 104, 97, 118, 101, 32, 100, 101, 118, 101, 108, 111, 112, 101, 100, 60, 116, 97, 98, 108, 101, 32, 119, 105, 100, 116, 104, 61, 34, 99, 101, 108, 101, 98, 114, 97, 116, 105, 111, 110, 32, 111, 102, 70, 111, 108, 108, 111, 119, 105, 110, 103, 32, 116, 104, 101, 32, 116, 111, 32, 100, 105, 115, 116, 105, 110, 103, 117, 105, 115, 104, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 98, 116, 97, 107, 101, 115, 32, 112, 108, 97, 99, 101, 32, 105, 110, 117, 110, 100, 101, 114, 32, 116, 104, 101, 32, 110, 97, 109, 101, 110, 111, 116, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 62, 60, 33, 91, 101, 110, 100, 105, 102, 93, 45, 45, 62, 10, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 45, 105, 110, 115, 
116, 101, 97, 100, 32, 111, 102, 32, 116, 104, 101, 105, 110, 116, 114, 111, 100, 117, 99, 101, 100, 32, 116, 104, 101, 116, 104, 101, 32, 112, 114, 111, 99, 101, 115, 115, 32, 111, 102, 105, 110, 99, 114, 101, 97, 115, 105, 110, 103, 32, 116, 104, 101, 100, 105, 102, 102, 101, 114, 101, 110, 99, 101, 115, 32, 105, 110, 101, 115, 116, 105, 109, 97, 116, 101, 100, 32, 116, 104, 97, 116, 101, 115, 112, 101, 99, 105, 97, 108, 108, 121, 32, 116, 104, 101, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 105, 100, 61, 34, 119, 97, 115, 32, 101, 118, 101, 110, 116, 117, 97, 108, 108, 121, 116, 104, 114, 111, 117, 103, 104, 111, 117, 116, 32, 104, 105, 115, 116, 104, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 99, 101, 115, 111, 109, 101, 116, 104, 105, 110, 103, 32, 116, 104, 97, 116, 115, 112, 97, 110, 62, 60, 47, 115, 112, 97, 110, 62, 60, 47, 115, 105, 103, 110, 105, 102, 105, 99, 97, 110, 116, 108, 121, 32, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 13, 10, 101, 110, 118, 105, 114, 111, 110, 109, 101, 110, 116, 97, 108, 32, 116, 111, 32, 112, 114, 101, 118, 101, 110, 116, 32, 116, 104, 101, 104, 97, 118, 101, 32, 98, 101, 101, 110, 32, 117, 115, 101, 100, 101, 115, 112, 101, 99, 105, 97, 108, 108, 121, 32, 102, 111, 114, 117, 110, 100, 101, 114, 115, 116, 97, 110, 100, 32, 116, 104, 101, 105, 115, 32, 101, 115, 115, 101, 110, 116, 105, 97, 108, 108, 121, 119, 101, 114, 101, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 105, 115, 32, 116, 104, 101, 32, 108, 97, 114, 103, 101, 115, 116, 104, 97, 118, 101, 32, 98, 101, 101, 110, 32, 109, 97, 100, 101, 34, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 105, 110, 116, 101, 114, 112, 114, 101, 116, 101, 100, 32, 97, 115, 115, 101, 99, 111, 110, 100, 32, 104, 97, 108, 102, 32, 111, 102, 99, 114, 111, 108, 108, 105, 110, 103, 61, 34, 110, 111, 34, 32, 105, 115, 32, 99, 111, 109, 112, 111, 115, 101, 100, 32, 111, 102, 73, 73, 44, 32, 72, 111, 108, 121, 32, 82, 111, 109, 97, 110, 105, 115, 32, 101, 120, 112, 101, 99, 116, 101, 100, 32, 116, 111, 104, 97, 118, 101, 32, 116, 104, 101, 105, 114, 32, 111, 119, 110, 100, 101, 102, 105, 110, 101, 100, 32, 97, 115, 32, 116, 104, 101, 116, 114, 97, 100, 105, 116, 105, 111, 110, 97, 108, 108, 121, 32, 104, 97, 118, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 97, 114, 101, 32, 111, 102, 116, 101, 110, 32, 117, 115, 101, 100, 116, 111, 32, 101, 110, 115, 117, 114, 101, 32, 116, 104, 97, 116, 97, 103, 114, 101, 101, 109, 101, 110, 116, 32, 119, 105, 116, 104, 99, 111, 110, 116, 97, 105, 110, 105, 110, 103, 32, 116, 104, 101, 97, 114, 101, 32, 102, 114, 101, 113, 117, 101, 110, 116, 108, 121, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 111, 110, 101, 120, 97, 109, 112, 108, 101, 32, 105, 115, 32, 116, 104, 101, 114, 101, 115, 117, 108, 116, 105, 110, 103, 32, 105, 110, 32, 97, 60, 47, 97, 62, 60, 47, 108, 105, 62, 60, 47, 117, 108, 62, 32, 99, 108, 97, 115, 115, 61, 34, 102, 111, 111, 116, 101, 114, 97, 110, 100, 32, 101, 115, 112, 101, 99, 105, 97, 108, 108, 121, 116, 121, 112, 101, 61, 34, 98, 117, 116, 116, 111, 110, 34, 32, 60, 47, 115, 112, 97, 110, 62, 60, 47, 115, 112, 97, 110, 62, 119, 104, 105, 99, 104, 32, 105, 110, 99, 108, 117, 100, 101, 100, 62, 10, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 32, 116, 104, 101, 99, 97, 114, 114, 105, 101, 100, 32, 111, 117, 116, 32, 98, 121, 72, 111, 119, 101, 118, 101, 114, 44, 32, 105, 116, 32, 105, 115, 98, 101, 99, 97, 109, 101, 32, 112, 97, 114, 116, 32, 
111, 102, 105, 110, 32, 114, 101, 108, 97, 116, 105, 111, 110, 32, 116, 111, 112, 111, 112, 117, 108, 97, 114, 32, 105, 110, 32, 116, 104, 101, 116, 104, 101, 32, 99, 97, 112, 105, 116, 97, 108, 32, 111, 102, 119, 97, 115, 32, 111, 102, 102, 105, 99, 105, 97, 108, 108, 121, 119, 104, 105, 99, 104, 32, 104, 97, 115, 32, 98, 101, 101, 110, 116, 104, 101, 32, 72, 105, 115, 116, 111, 114, 121, 32, 111, 102, 97, 108, 116, 101, 114, 110, 97, 116, 105, 118, 101, 32, 116, 111, 100, 105, 102, 102, 101, 114, 101, 110, 116, 32, 102, 114, 111, 109, 116, 111, 32, 115, 117, 112, 112, 111, 114, 116, 32, 116, 104, 101, 115, 117, 103, 103, 101, 115, 116, 101, 100, 32, 116, 104, 97, 116, 105, 110, 32, 116, 104, 101, 32, 112, 114, 111, 99, 101, 115, 115, 32, 32, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 116, 104, 101, 32, 102, 111, 117, 110, 100, 97, 116, 105, 111, 110, 98, 101, 99, 97, 117, 115, 101, 32, 111, 102, 32, 104, 105, 115, 99, 111, 110, 99, 101, 114, 110, 101, 100, 32, 119, 105, 116, 104, 116, 104, 101, 32, 117, 110, 105, 118, 101, 114, 115, 105, 116, 121, 111, 112, 112, 111, 115, 101, 100, 32, 116, 111, 32, 116, 104, 101, 116, 104, 101, 32, 99, 111, 110, 116, 101, 120, 116, 32, 111, 102, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 112, 116, 101, 120, 116, 34, 32, 110, 97, 109, 101, 61, 34, 113, 34, 9, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 116, 104, 101, 32, 115, 99, 105, 101, 110, 116, 105, 102, 105, 99, 114, 101, 112, 114, 101, 115, 101, 110, 116, 101, 100, 32, 98, 121, 109, 97, 116, 104, 101, 109, 97, 116, 105, 99, 105, 97, 110, 115, 101, 108, 101, 99, 116, 101, 100, 32, 98, 121, 32, 116, 104, 101, 116, 104, 97, 116, 32, 104, 97, 118, 101, 32, 98, 101, 101, 110, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 100, 105, 118, 32, 105, 100, 61, 34, 104, 101, 97, 100, 101, 114, 105, 110, 32, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 44, 99, 111, 110, 118, 101, 114, 116, 101, 100, 32, 105, 110, 116, 111, 41, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 112, 104, 105, 108, 111, 115, 111, 112, 104, 105, 99, 97, 108, 32, 115, 114, 112, 115, 107, 111, 104, 114, 118, 97, 116, 115, 107, 105, 116, 105, 225, 186, 191, 110, 103, 32, 86, 105, 225, 187, 135, 116, 208, 160, 209, 131, 209, 129, 209, 129, 208, 186, 208, 184, 208, 185, 209, 128, 209, 131, 209, 129, 209, 129, 208, 186, 208, 184, 208, 185, 105, 110, 118, 101, 115, 116, 105, 103, 97, 99, 105, 195, 179, 110, 112, 97, 114, 116, 105, 99, 105, 112, 97, 99, 105, 195, 179, 110, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 209, 139, 208, 181, 208, 190, 208, 177, 208, 187, 208, 176, 209, 129, 209, 130, 208, 184, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 209, 139, 208, 185, 209, 135, 208, 181, 208, 187, 208, 190, 208, 178, 208, 181, 208, 186, 209, 129, 208, 184, 209, 129, 209, 130, 208, 181, 208, 188, 209, 139, 208, 157, 208, 190, 208, 178, 208, 190, 209, 129, 209, 130, 208, 184, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 209, 139, 209, 133, 208, 190, 208, 177, 208, 187, 208, 176, 209, 129, 209, 130, 209, 140, 208, 178, 209, 128, 208, 181, 208, 188, 208, 181, 208, 189, 208, 184, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 208, 176, 209, 143, 209, 129, 208, 181, 208, 179, 208, 190, 208, 180, 208, 189, 209, 143, 209, 129, 208, 186, 208, 176, 209, 135, 208, 176, 209, 130, 209, 140, 208, 189, 208, 190, 208, 178, 208, 190, 209, 129, 209, 130, 208, 184, 208, 163, 208, 186, 209, 128, 208, 176, 208, 184, 208, 189, 209, 139, 208, 178, 208, 190, 208, 191, 209, 128, 
208, 190, 209, 129, 209, 139, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 208, 190, 208, 185, 209, 129, 208, 180, 208, 181, 208, 187, 208, 176, 209, 130, 209, 140, 208, 191, 208, 190, 208, 188, 208, 190, 209, 137, 209, 140, 209, 142, 209, 129, 209, 128, 208, 181, 208, 180, 209, 129, 209, 130, 208, 178, 208, 190, 208, 177, 209, 128, 208, 176, 208, 183, 208, 190, 208, 188, 209, 129, 209, 130, 208, 190, 209, 128, 208, 190, 208, 189, 209, 139, 209, 131, 209, 135, 208, 176, 209, 129, 209, 130, 208, 184, 208, 181, 209, 130, 208, 181, 209, 135, 208, 181, 208, 189, 208, 184, 208, 181, 208, 147, 208, 187, 208, 176, 208, 178, 208, 189, 208, 176, 209, 143, 208, 184, 209, 129, 209, 130, 208, 190, 209, 128, 208, 184, 208, 184, 209, 129, 208, 184, 209, 129, 209, 130, 208, 181, 208, 188, 208, 176, 209, 128, 208, 181, 209, 136, 208, 181, 208, 189, 208, 184, 209, 143, 208, 161, 208, 186, 208, 176, 209, 135, 208, 176, 209, 130, 209, 140, 208, 191, 208, 190, 209, 141, 209, 130, 208, 190, 208, 188, 209, 131, 209, 129, 208, 187, 208, 181, 208, 180, 209, 131, 208, 181, 209, 130, 209, 129, 208, 186, 208, 176, 208, 183, 208, 176, 209, 130, 209, 140, 209, 130, 208, 190, 208, 178, 208, 176, 209, 128, 208, 190, 208, 178, 208, 186, 208, 190, 208, 189, 208, 181, 209, 135, 208, 189, 208, 190, 209, 128, 208, 181, 209, 136, 208, 181, 208, 189, 208, 184, 208, 181, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 208, 190, 208, 181, 208, 190, 209, 128, 208, 179, 208, 176, 208, 189, 208, 190, 208, 178, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 208, 190, 208, 188, 208, 160, 208, 181, 208, 186, 208, 187, 208, 176, 208, 188, 208, 176, 216, 167, 217, 132, 217, 133, 217, 134, 216, 170, 216, 175, 217, 137, 217, 133, 217, 134, 216, 170, 216, 175, 217, 138, 216, 167, 216, 170, 216, 167, 217, 132, 217, 133, 217, 136, 216, 182, 217, 136, 216, 185, 216, 167, 217, 132, 216, 168, 216, 177, 216, 167, 217, 133, 216, 172, 216, 167, 217, 132, 217, 133, 217, 136, 216, 167, 217, 130, 216, 185, 216, 167, 217, 132, 216, 177, 216, 179, 216, 167, 216, 166, 217, 132, 217, 133, 216, 180, 216, 167, 216, 177, 217, 131, 216, 167, 216, 170, 216, 167, 217, 132, 216, 163, 216, 185, 216, 182, 216, 167, 216, 161, 216, 167, 217, 132, 216, 177, 217, 138, 216, 167, 216, 182, 216, 169, 216, 167, 217, 132, 216, 170, 216, 181, 217, 133, 217, 138, 217, 133, 216, 167, 217, 132, 216, 167, 216, 185, 216, 182, 216, 167, 216, 161, 216, 167, 217, 132, 217, 134, 216, 170, 216, 167, 216, 166, 216, 172, 216, 167, 217, 132, 216, 163, 217, 132, 216, 185, 216, 167, 216, 168, 216, 167, 217, 132, 216, 170, 216, 179, 216, 172, 217, 138, 217, 132, 216, 167, 217, 132, 216, 163, 217, 130, 216, 179, 216, 167, 217, 133, 216, 167, 217, 132, 216, 182, 216, 186, 216, 183, 216, 167, 216, 170, 216, 167, 217, 132, 217, 129, 217, 138, 216, 175, 217, 138, 217, 136, 216, 167, 217, 132, 216, 170, 216, 177, 216, 173, 217, 138, 216, 168, 216, 167, 217, 132, 216, 172, 216, 175, 217, 138, 216, 175, 216, 169, 216, 167, 217, 132, 216, 170, 216, 185, 217, 132, 217, 138, 217, 133, 216, 167, 217, 132, 216, 163, 216, 174, 216, 168, 216, 167, 216, 177, 216, 167, 217, 132, 216, 167, 217, 129, 217, 132, 216, 167, 217, 133, 216, 167, 217, 132, 216, 163, 217, 129, 217, 132, 216, 167, 217, 133, 216, 167, 217, 132, 216, 170, 216, 167, 216, 177, 217, 138, 216, 174, 216, 167, 217, 132, 216, 170, 217, 130, 217, 134, 217, 138, 216, 169, 216, 167, 217, 132, 216, 167, 217, 132, 216, 185, 216, 167, 216, 168, 216, 167, 217, 132, 216, 174, 217, 136, 216, 167, 216, 183, 216, 177, 216, 167, 217, 132, 217, 
133, 216, 172, 216, 170, 217, 133, 216, 185, 216, 167, 217, 132, 216, 175, 217, 138, 217, 131, 217, 136, 216, 177, 216, 167, 217, 132, 216, 179, 217, 138, 216, 167, 216, 173, 216, 169, 216, 185, 216, 168, 216, 175, 216, 167, 217, 132, 217, 132, 217, 135, 216, 167, 217, 132, 216, 170, 216, 177, 216, 168, 217, 138, 216, 169, 216, 167, 217, 132, 216, 177, 217, 136, 216, 167, 216, 168, 216, 183, 216, 167, 217, 132, 216, 163, 216, 175, 216, 168, 217, 138, 216, 169, 216, 167, 217, 132, 216, 167, 216, 174, 216, 168, 216, 167, 216, 177, 216, 167, 217, 132, 217, 133, 216, 170, 216, 173, 216, 175, 216, 169, 216, 167, 217, 132, 216, 167, 216, 186, 216, 167, 217, 134, 217, 138, 99, 117, 114, 115, 111, 114, 58, 112, 111, 105, 110, 116, 101, 114, 59, 60, 47, 116, 105, 116, 108, 101, 62, 10, 60, 109, 101, 116, 97, 32, 34, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 34, 62, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 109, 101, 109, 98, 101, 114, 115, 32, 111, 102, 32, 116, 104, 101, 32, 119, 105, 110, 100, 111, 119, 46, 108, 111, 99, 97, 116, 105, 111, 110, 118, 101, 114, 116, 105, 99, 97, 108, 45, 97, 108, 105, 103, 110, 58, 47, 97, 62, 32, 124, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 60, 33, 100, 111, 99, 116, 121, 112, 101, 32, 104, 116, 109, 108, 62, 109, 101, 100, 105, 97, 61, 34, 115, 99, 114, 101, 101, 110, 34, 32, 60, 111, 112, 116, 105, 111, 110, 32, 118, 97, 108, 117, 101, 61, 34, 102, 97, 118, 105, 99, 111, 110, 46, 105, 99, 111, 34, 32, 47, 62, 10, 9, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 104, 97, 114, 97, 99, 116, 101, 114, 105, 115, 116, 105, 99, 115, 34, 32, 109, 101, 116, 104, 111, 100, 61, 34, 103, 101, 116, 34, 32, 47, 98, 111, 100, 121, 62, 10, 60, 47, 104, 116, 109, 108, 62, 10, 115, 104, 111, 114, 116, 99, 117, 116, 32, 105, 99, 111, 110, 34, 32, 100, 111, 99, 117, 109, 101, 110, 116, 46, 119, 114, 105, 116, 101, 40, 112, 97, 100, 100, 105, 110, 103, 45, 98, 111, 116, 116, 111, 109, 58, 114, 101, 112, 114, 101, 115, 101, 110, 116, 97, 116, 105, 118, 101, 115, 115, 117, 98, 109, 105, 116, 34, 32, 118, 97, 108, 117, 101, 61, 34, 97, 108, 105, 103, 110, 61, 34, 99, 101, 110, 116, 101, 114, 34, 32, 116, 104, 114, 111, 117, 103, 104, 111, 117, 116, 32, 116, 104, 101, 32, 115, 99, 105, 101, 110, 99, 101, 32, 102, 105, 99, 116, 105, 111, 110, 10, 32, 32, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 115, 117, 98, 109, 105, 116, 34, 32, 99, 108, 97, 115, 115, 61, 34, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 109, 111, 115, 116, 32, 118, 97, 108, 105, 103, 110, 61, 34, 116, 111, 112, 34, 62, 60, 119, 97, 115, 32, 101, 115, 116, 97, 98, 108, 105, 115, 104, 101, 100, 41, 59, 13, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 114, 101, 116, 117, 114, 110, 32, 102, 97, 108, 115, 101, 59, 34, 62, 41, 46, 115, 116, 121, 108, 101, 46, 100, 105, 115, 112, 108, 97, 121, 98, 101, 99, 97, 117, 115, 101, 32, 111, 102, 32, 116, 104, 101, 32, 100, 111, 99, 117, 109, 101, 110, 116, 46, 99, 111, 111, 107, 105, 101, 60, 102, 111, 114, 109, 32, 97, 99, 116, 105, 111, 110, 61, 34, 47, 125, 98, 111, 100, 121, 123, 109, 97, 114, 103, 105, 110, 58, 48, 59, 69, 110, 99, 121, 99, 108, 111, 112, 101, 100, 105, 97, 32, 111, 102, 118, 101, 114, 115, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 32, 46, 99, 114, 101, 97, 116, 101, 69, 108, 101, 109, 101, 110, 116, 40, 110, 97, 109, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 60, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 10, 10, 97, 100, 109, 105, 110, 105, 115, 
116, 114, 97, 116, 105, 118, 101, 32, 60, 47, 98, 111, 100, 121, 62, 10, 60, 47, 104, 116, 109, 108, 62, 104, 105, 115, 116, 111, 114, 121, 32, 111, 102, 32, 116, 104, 101, 32, 34, 62, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 112, 111, 114, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 32, 97, 115, 32, 112, 97, 114, 116, 32, 111, 102, 32, 116, 104, 101, 32, 38, 110, 98, 115, 112, 59, 60, 97, 32, 104, 114, 101, 102, 61, 34, 111, 116, 104, 101, 114, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 34, 62, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 60, 47, 115, 112, 97, 110, 62, 60, 47, 115, 112, 97, 110, 62, 60, 73, 110, 32, 111, 116, 104, 101, 114, 32, 119, 111, 114, 100, 115, 44, 100, 105, 115, 112, 108, 97, 121, 58, 32, 98, 108, 111, 99, 107, 59, 99, 111, 110, 116, 114, 111, 108, 32, 111, 102, 32, 116, 104, 101, 32, 105, 110, 116, 114, 111, 100, 117, 99, 116, 105, 111, 110, 32, 111, 102, 47, 62, 10, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 97, 115, 32, 119, 101, 108, 108, 32, 97, 115, 32, 116, 104, 101, 32, 105, 110, 32, 114, 101, 99, 101, 110, 116, 32, 121, 101, 97, 114, 115, 13, 10, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 60, 47, 100, 105, 118, 62, 10, 9, 60, 47, 100, 105, 118, 62, 10, 105, 110, 115, 112, 105, 114, 101, 100, 32, 98, 121, 32, 116, 104, 101, 116, 104, 101, 32, 101, 110, 100, 32, 111, 102, 32, 116, 104, 101, 32, 99, 111, 109, 112, 97, 116, 105, 98, 108, 101, 32, 119, 105, 116, 104, 98, 101, 99, 97, 109, 101, 32, 107, 110, 111, 119, 110, 32, 97, 115, 32, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 58, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 60, 32, 73, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 32, 116, 104, 101, 114, 101, 32, 104, 97, 118, 101, 32, 98, 101, 101, 110, 71, 101, 114, 109, 97, 110, 32, 108, 97, 110, 103, 117, 97, 103, 101, 32, 115, 116, 121, 108, 101, 61, 34, 99, 111, 108, 111, 114, 58, 35, 67, 111, 109, 109, 117, 110, 105, 115, 116, 32, 80, 97, 114, 116, 121, 99, 111, 110, 115, 105, 115, 116, 101, 110, 116, 32, 119, 105, 116, 104, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 99, 101, 108, 108, 32, 109, 97, 114, 103, 105, 110, 104, 101, 105, 103, 104, 116, 61, 34, 116, 104, 101, 32, 109, 97, 106, 111, 114, 105, 116, 121, 32, 111, 102, 34, 32, 97, 108, 105, 103, 110, 61, 34, 99, 101, 110, 116, 101, 114, 114, 101, 108, 97, 116, 101, 100, 32, 116, 111, 32, 116, 104, 101, 32, 109, 97, 110, 121, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 32, 79, 114, 116, 104, 111, 100, 111, 120, 32, 67, 104, 117, 114, 99, 104, 115, 105, 109, 105, 108, 97, 114, 32, 116, 111, 32, 116, 104, 101, 32, 47, 62, 10, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 115, 119, 97, 115, 32, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 117, 110, 116, 105, 108, 32, 104, 105, 115, 32, 100, 101, 97, 116, 104, 125, 41, 40, 41, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 111, 116, 104, 101, 114, 32, 108, 97, 110, 103, 117, 97, 103, 101, 115, 99, 111, 109, 112, 97, 114, 101, 100, 32, 116, 111, 32, 116, 104, 101, 112, 111, 114, 116, 105, 111, 110, 115, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 78, 101, 116, 104, 101, 114, 108, 97, 110, 100, 115, 116, 104, 101, 32, 109, 111, 115, 116, 32, 99, 111, 109, 109, 111, 110, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 58, 117, 114, 108, 40, 97, 114, 103, 117, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 115, 99, 114, 111, 108, 108, 105, 110, 103, 61, 34, 110, 111, 34, 32, 105, 110, 99, 108, 
117, 100, 101, 100, 32, 105, 110, 32, 116, 104, 101, 78, 111, 114, 116, 104, 32, 65, 109, 101, 114, 105, 99, 97, 110, 32, 116, 104, 101, 32, 110, 97, 109, 101, 32, 111, 102, 32, 116, 104, 101, 105, 110, 116, 101, 114, 112, 114, 101, 116, 97, 116, 105, 111, 110, 115, 116, 104, 101, 32, 116, 114, 97, 100, 105, 116, 105, 111, 110, 97, 108, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 32, 111, 102, 32, 102, 114, 101, 113, 117, 101, 110, 116, 108, 121, 32, 117, 115, 101, 100, 97, 32, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 32, 111, 102, 118, 101, 114, 121, 32, 115, 105, 109, 105, 108, 97, 114, 32, 116, 111, 115, 117, 114, 114, 111, 117, 110, 100, 105, 110, 103, 32, 116, 104, 101, 101, 120, 97, 109, 112, 108, 101, 32, 111, 102, 32, 116, 104, 105, 115, 97, 108, 105, 103, 110, 61, 34, 99, 101, 110, 116, 101, 114, 34, 62, 119, 111, 117, 108, 100, 32, 104, 97, 118, 101, 32, 98, 101, 101, 110, 105, 109, 97, 103, 101, 95, 99, 97, 112, 116, 105, 111, 110, 32, 61, 97, 116, 116, 97, 99, 104, 101, 100, 32, 116, 111, 32, 116, 104, 101, 115, 117, 103, 103, 101, 115, 116, 105, 110, 103, 32, 116, 104, 97, 116, 105, 110, 32, 116, 104, 101, 32, 102, 111, 114, 109, 32, 111, 102, 32, 105, 110, 118, 111, 108, 118, 101, 100, 32, 105, 110, 32, 116, 104, 101, 105, 115, 32, 100, 101, 114, 105, 118, 101, 100, 32, 102, 114, 111, 109, 110, 97, 109, 101, 100, 32, 97, 102, 116, 101, 114, 32, 116, 104, 101, 73, 110, 116, 114, 111, 100, 117, 99, 116, 105, 111, 110, 32, 116, 111, 114, 101, 115, 116, 114, 105, 99, 116, 105, 111, 110, 115, 32, 111, 110, 32, 115, 116, 121, 108, 101, 61, 34, 119, 105, 100, 116, 104, 58, 32, 99, 97, 110, 32, 98, 101, 32, 117, 115, 101, 100, 32, 116, 111, 32, 116, 104, 101, 32, 99, 114, 101, 97, 116, 105, 111, 110, 32, 111, 102, 109, 111, 115, 116, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 32, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 97, 110, 100, 114, 101, 115, 117, 108, 116, 101, 100, 32, 105, 110, 32, 116, 104, 101, 99, 111, 108, 108, 97, 112, 115, 101, 32, 111, 102, 32, 116, 104, 101, 84, 104, 105, 115, 32, 109, 101, 97, 110, 115, 32, 116, 104, 97, 116, 101, 108, 101, 109, 101, 110, 116, 115, 32, 111, 102, 32, 116, 104, 101, 119, 97, 115, 32, 114, 101, 112, 108, 97, 99, 101, 100, 32, 98, 121, 97, 110, 97, 108, 121, 115, 105, 115, 32, 111, 102, 32, 116, 104, 101, 105, 110, 115, 112, 105, 114, 97, 116, 105, 111, 110, 32, 102, 111, 114, 114, 101, 103, 97, 114, 100, 101, 100, 32, 97, 115, 32, 116, 104, 101, 109, 111, 115, 116, 32, 115, 117, 99, 99, 101, 115, 115, 102, 117, 108, 107, 110, 111, 119, 110, 32, 97, 115, 32, 38, 113, 117, 111, 116, 59, 97, 32, 99, 111, 109, 112, 114, 101, 104, 101, 110, 115, 105, 118, 101, 72, 105, 115, 116, 111, 114, 121, 32, 111, 102, 32, 116, 104, 101, 32, 119, 101, 114, 101, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 114, 101, 116, 117, 114, 110, 101, 100, 32, 116, 111, 32, 116, 104, 101, 97, 114, 101, 32, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 85, 110, 115, 111, 117, 114, 99, 101, 100, 32, 105, 109, 97, 103, 101, 62, 10, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 111, 110, 115, 105, 115, 116, 115, 32, 111, 102, 32, 116, 104, 101, 115, 116, 111, 112, 80, 114, 111, 112, 97, 103, 97, 116, 105, 111, 110, 105, 110, 116, 101, 114, 101, 115, 116, 32, 105, 110, 32, 116, 104, 101, 97, 118, 97, 105, 108, 97, 98, 105, 108, 105, 116, 121, 32, 111, 102, 97, 112, 112, 101, 97, 114, 115, 32, 116, 111, 32, 104, 97, 118, 101, 101, 108, 101, 99, 116, 114, 111, 109, 97, 103, 110, 101, 116, 105, 99, 
101, 110, 97, 98, 108, 101, 83, 101, 114, 118, 105, 99, 101, 115, 40, 102, 117, 110, 99, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 73, 116, 32, 105, 115, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 60, 47, 115, 99, 114, 105, 112, 116, 62, 60, 47, 100, 105, 118, 62, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 123, 118, 97, 114, 32, 114, 101, 108, 97, 116, 105, 118, 101, 32, 116, 111, 32, 116, 104, 101, 97, 115, 32, 97, 32, 114, 101, 115, 117, 108, 116, 32, 111, 102, 32, 116, 104, 101, 32, 112, 111, 115, 105, 116, 105, 111, 110, 32, 111, 102, 70, 111, 114, 32, 101, 120, 97, 109, 112, 108, 101, 44, 32, 105, 110, 32, 109, 101, 116, 104, 111, 100, 61, 34, 112, 111, 115, 116, 34, 32, 119, 97, 115, 32, 102, 111, 108, 108, 111, 119, 101, 100, 32, 98, 121, 38, 97, 109, 112, 59, 109, 100, 97, 115, 104, 59, 32, 116, 104, 101, 116, 104, 101, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 117, 108, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 97, 102, 116, 101, 114, 32, 116, 104, 101, 32, 100, 101, 97, 116, 104, 119, 105, 116, 104, 32, 114, 101, 115, 112, 101, 99, 116, 32, 116, 111, 115, 116, 121, 108, 101, 61, 34, 112, 97, 100, 100, 105, 110, 103, 58, 105, 115, 32, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 108, 121, 100, 105, 115, 112, 108, 97, 121, 58, 105, 110, 108, 105, 110, 101, 59, 32, 116, 121, 112, 101, 61, 34, 115, 117, 98, 109, 105, 116, 34, 32, 105, 115, 32, 100, 105, 118, 105, 100, 101, 100, 32, 105, 110, 116, 111, 228, 184, 173, 230, 150, 135, 32, 40, 231, 174, 128, 228, 189, 147, 41, 114, 101, 115, 112, 111, 110, 115, 97, 98, 105, 108, 105, 100, 97, 100, 97, 100, 109, 105, 110, 105, 115, 116, 114, 97, 99, 105, 195, 179, 110, 105, 110, 116, 101, 114, 110, 97, 99, 105, 111, 110, 97, 108, 101, 115, 99, 111, 114, 114, 101, 115, 112, 111, 110, 100, 105, 101, 110, 116, 101, 224, 164, 137, 224, 164, 170, 224, 164, 175, 224, 165, 139, 224, 164, 151, 224, 164, 170, 224, 165, 130, 224, 164, 176, 224, 165, 141, 224, 164, 181, 224, 164, 185, 224, 164, 174, 224, 164, 190, 224, 164, 176, 224, 165, 135, 224, 164, 178, 224, 165, 139, 224, 164, 151, 224, 165, 139, 224, 164, 130, 224, 164, 154, 224, 165, 129, 224, 164, 168, 224, 164, 190, 224, 164, 181, 224, 164, 178, 224, 165, 135, 224, 164, 149, 224, 164, 191, 224, 164, 168, 224, 164, 184, 224, 164, 176, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 164, 170, 224, 165, 129, 224, 164, 178, 224, 164, 191, 224, 164, 184, 224, 164, 150, 224, 165, 139, 224, 164, 156, 224, 165, 135, 224, 164, 130, 224, 164, 154, 224, 164, 190, 224, 164, 185, 224, 164, 191, 224, 164, 143, 224, 164, 173, 224, 165, 135, 224, 164, 156, 224, 165, 135, 224, 164, 130, 224, 164, 182, 224, 164, 190, 224, 164, 174, 224, 164, 191, 224, 164, 178, 224, 164, 185, 224, 164, 174, 224, 164, 190, 224, 164, 176, 224, 165, 128, 224, 164, 156, 224, 164, 190, 224, 164, 151, 224, 164, 176, 224, 164, 163, 224, 164, 172, 224, 164, 168, 224, 164, 190, 224, 164, 168, 224, 165, 135, 224, 164, 149, 224, 165, 129, 224, 164, 174, 224, 164, 190, 224, 164, 176, 224, 164, 172, 224, 165, 141, 224, 164, 178, 224, 165, 137, 224, 164, 151, 224, 164, 174, 224, 164, 190, 224, 164, 178, 224, 164, 191, 224, 164, 149, 224, 164, 174, 224, 164, 185, 224, 164, 191, 224, 164, 178, 224, 164, 190, 224, 164, 170, 224, 165, 131, 224, 164, 183, 224, 165, 141, 224, 164, 160, 224, 164, 172, 224, 164, 162, 224, 164, 188, 224, 164, 164, 224, 165, 135, 224, 164, 173, 224, 164, 190, 224, 164, 156, 224, 164, 170, 224, 
164, 190, 224, 164, 149, 224, 165, 141, 224, 164, 178, 224, 164, 191, 224, 164, 149, 224, 164, 159, 224, 165, 141, 224, 164, 176, 224, 165, 135, 224, 164, 168, 224, 164, 150, 224, 164, 191, 224, 164, 178, 224, 164, 190, 224, 164, 171, 224, 164, 166, 224, 165, 140, 224, 164, 176, 224, 164, 190, 224, 164, 168, 224, 164, 174, 224, 164, 190, 224, 164, 174, 224, 164, 178, 224, 165, 135, 224, 164, 174, 224, 164, 164, 224, 164, 166, 224, 164, 190, 224, 164, 168, 224, 164, 172, 224, 164, 190, 224, 164, 156, 224, 164, 190, 224, 164, 176, 224, 164, 181, 224, 164, 191, 224, 164, 149, 224, 164, 190, 224, 164, 184, 224, 164, 149, 224, 165, 141, 224, 164, 175, 224, 165, 139, 224, 164, 130, 224, 164, 154, 224, 164, 190, 224, 164, 185, 224, 164, 164, 224, 165, 135, 224, 164, 170, 224, 164, 185, 224, 165, 129, 224, 164, 129, 224, 164, 154, 224, 164, 172, 224, 164, 164, 224, 164, 190, 224, 164, 175, 224, 164, 190, 224, 164, 184, 224, 164, 130, 224, 164, 181, 224, 164, 190, 224, 164, 166, 224, 164, 166, 224, 165, 135, 224, 164, 150, 224, 164, 168, 224, 165, 135, 224, 164, 170, 224, 164, 191, 224, 164, 155, 224, 164, 178, 224, 165, 135, 224, 164, 181, 224, 164, 191, 224, 164, 182, 224, 165, 135, 224, 164, 183, 224, 164, 176, 224, 164, 190, 224, 164, 156, 224, 165, 141, 224, 164, 175, 224, 164, 137, 224, 164, 164, 224, 165, 141, 224, 164, 164, 224, 164, 176, 224, 164, 174, 224, 165, 129, 224, 164, 130, 224, 164, 172, 224, 164, 136, 224, 164, 166, 224, 165, 139, 224, 164, 168, 224, 165, 139, 224, 164, 130, 224, 164, 137, 224, 164, 170, 224, 164, 149, 224, 164, 176, 224, 164, 163, 224, 164, 170, 224, 164, 162, 224, 164, 188, 224, 165, 135, 224, 164, 130, 224, 164, 184, 224, 165, 141, 224, 164, 165, 224, 164, 191, 224, 164, 164, 224, 164, 171, 224, 164, 191, 224, 164, 178, 224, 165, 141, 224, 164, 174, 224, 164, 174, 224, 165, 129, 224, 164, 150, 224, 165, 141, 224, 164, 175, 224, 164, 133, 224, 164, 154, 224, 165, 141, 224, 164, 155, 224, 164, 190, 224, 164, 155, 224, 165, 130, 224, 164, 159, 224, 164, 164, 224, 165, 128, 224, 164, 184, 224, 164, 130, 224, 164, 151, 224, 165, 128, 224, 164, 164, 224, 164, 156, 224, 164, 190, 224, 164, 143, 224, 164, 151, 224, 164, 190, 224, 164, 181, 224, 164, 191, 224, 164, 173, 224, 164, 190, 224, 164, 151, 224, 164, 152, 224, 164, 163, 224, 165, 141, 224, 164, 159, 224, 165, 135, 224, 164, 166, 224, 165, 130, 224, 164, 184, 224, 164, 176, 224, 165, 135, 224, 164, 166, 224, 164, 191, 224, 164, 168, 224, 165, 139, 224, 164, 130, 224, 164, 185, 224, 164, 164, 224, 165, 141, 224, 164, 175, 224, 164, 190, 224, 164, 184, 224, 165, 135, 224, 164, 149, 224, 165, 141, 224, 164, 184, 224, 164, 151, 224, 164, 190, 224, 164, 130, 224, 164, 167, 224, 165, 128, 224, 164, 181, 224, 164, 191, 224, 164, 182, 224, 165, 141, 224, 164, 181, 224, 164, 176, 224, 164, 190, 224, 164, 164, 224, 165, 135, 224, 164, 130, 224, 164, 166, 224, 165, 136, 224, 164, 159, 224, 165, 141, 224, 164, 184, 224, 164, 168, 224, 164, 149, 224, 165, 141, 224, 164, 182, 224, 164, 190, 224, 164, 184, 224, 164, 190, 224, 164, 174, 224, 164, 168, 224, 165, 135, 224, 164, 133, 224, 164, 166, 224, 164, 190, 224, 164, 178, 224, 164, 164, 224, 164, 172, 224, 164, 191, 224, 164, 156, 224, 164, 178, 224, 165, 128, 224, 164, 170, 224, 165, 129, 224, 164, 176, 224, 165, 130, 224, 164, 183, 224, 164, 185, 224, 164, 191, 224, 164, 130, 224, 164, 166, 224, 165, 128, 224, 164, 174, 224, 164, 191, 224, 164, 164, 224, 165, 141, 224, 164, 176, 224, 164, 149, 224, 164, 181, 224, 164, 191, 224, 164, 164, 224, 164, 190, 224, 164, 176, 224, 
165, 129, 224, 164, 170, 224, 164, 175, 224, 165, 135, 224, 164, 184, 224, 165, 141, 224, 164, 165, 224, 164, 190, 224, 164, 168, 224, 164, 149, 224, 164, 176, 224, 165, 139, 224, 164, 161, 224, 164, 188, 224, 164, 174, 224, 165, 129, 224, 164, 149, 224, 165, 141, 224, 164, 164, 224, 164, 175, 224, 165, 139, 224, 164, 156, 224, 164, 168, 224, 164, 190, 224, 164, 149, 224, 165, 131, 224, 164, 170, 224, 164, 175, 224, 164, 190, 224, 164, 170, 224, 165, 139, 224, 164, 184, 224, 165, 141, 224, 164, 159, 224, 164, 152, 224, 164, 176, 224, 165, 135, 224, 164, 178, 224, 165, 130, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 165, 141, 224, 164, 175, 224, 164, 181, 224, 164, 191, 224, 164, 154, 224, 164, 190, 224, 164, 176, 224, 164, 184, 224, 165, 130, 224, 164, 154, 224, 164, 168, 224, 164, 190, 224, 164, 174, 224, 165, 130, 224, 164, 178, 224, 165, 141, 224, 164, 175, 224, 164, 166, 224, 165, 135, 224, 164, 150, 224, 165, 135, 224, 164, 130, 224, 164, 185, 224, 164, 174, 224, 165, 135, 224, 164, 182, 224, 164, 190, 224, 164, 184, 224, 165, 141, 224, 164, 149, 224, 165, 130, 224, 164, 178, 224, 164, 174, 224, 165, 136, 224, 164, 130, 224, 164, 168, 224, 165, 135, 224, 164, 164, 224, 165, 136, 224, 164, 175, 224, 164, 190, 224, 164, 176, 224, 164, 156, 224, 164, 191, 224, 164, 184, 224, 164, 149, 224, 165, 135, 114, 115, 115, 43, 120, 109, 108, 34, 32, 116, 105, 116, 108, 101, 61, 34, 45, 116, 121, 112, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 116, 105, 116, 108, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 97, 116, 32, 116, 104, 101, 32, 115, 97, 109, 101, 32, 116, 105, 109, 101, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 34, 32, 109, 101, 116, 104, 111, 100, 61, 34, 112, 111, 115, 116, 34, 32, 60, 47, 115, 112, 97, 110, 62, 60, 47, 97, 62, 60, 47, 108, 105, 62, 118, 101, 114, 116, 105, 99, 97, 108, 45, 97, 108, 105, 103, 110, 58, 116, 47, 106, 113, 117, 101, 114, 121, 46, 109, 105, 110, 46, 106, 115, 34, 62, 46, 99, 108, 105, 99, 107, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 32, 115, 116, 121, 108, 101, 61, 34, 112, 97, 100, 100, 105, 110, 103, 45, 125, 41, 40, 41, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 47, 115, 112, 97, 110, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 41, 59, 32, 114, 101, 116, 117, 114, 110, 32, 102, 97, 108, 115, 101, 59, 116, 101, 120, 116, 45, 100, 101, 99, 111, 114, 97, 116, 105, 111, 110, 58, 32, 115, 99, 114, 111, 108, 108, 105, 110, 103, 61, 34, 110, 111, 34, 32, 98, 111, 114, 100, 101, 114, 45, 99, 111, 108, 108, 97, 112, 115, 101, 58, 97, 115, 115, 111, 99, 105, 97, 116, 101, 100, 32, 119, 105, 116, 104, 32, 66, 97, 104, 97, 115, 97, 32, 73, 110, 100, 111, 110, 101, 115, 105, 97, 69, 110, 103, 108, 105, 115, 104, 32, 108, 97, 110, 103, 117, 97, 103, 101, 60, 116, 101, 120, 116, 32, 120, 109, 108, 58, 115, 112, 97, 99, 101, 61, 46, 103, 105, 102, 34, 32, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 60, 47, 98, 111, 100, 121, 62, 10, 60, 47, 104, 116, 109, 108, 62, 10, 111, 118, 101, 114, 102, 108, 111, 119, 58, 104, 105, 100, 100, 101, 110, 59, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 97, 100, 100, 69, 118, 101, 110, 116, 76, 105, 115, 116, 101, 110, 101, 114, 114, 101, 115, 112, 111, 110, 115, 105, 98, 108, 101, 32, 102, 111, 114, 32, 115, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 47, 102, 97, 118, 105, 99, 111, 110, 46, 105, 99, 111, 34, 32, 47, 62, 111, 112, 
101, 114, 97, 116, 105, 110, 103, 32, 115, 121, 115, 116, 101, 109, 34, 32, 115, 116, 121, 108, 101, 61, 34, 119, 105, 100, 116, 104, 58, 49, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 62, 83, 116, 97, 116, 101, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 108, 101, 102, 116, 59, 10, 100, 111, 99, 117, 109, 101, 110, 116, 46, 119, 114, 105, 116, 101, 40, 44, 32, 105, 110, 99, 108, 117, 100, 105, 110, 103, 32, 116, 104, 101, 32, 97, 114, 111, 117, 110, 100, 32, 116, 104, 101, 32, 119, 111, 114, 108, 100, 41, 59, 13, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 34, 32, 115, 116, 121, 108, 101, 61, 34, 104, 101, 105, 103, 104, 116, 58, 59, 111, 118, 101, 114, 102, 108, 111, 119, 58, 104, 105, 100, 100, 101, 110, 109, 111, 114, 101, 32, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 97, 110, 32, 105, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 97, 32, 109, 101, 109, 98, 101, 114, 32, 111, 102, 32, 116, 104, 101, 32, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 99, 97, 110, 32, 98, 101, 32, 102, 111, 117, 110, 100, 32, 105, 110, 32, 60, 47, 100, 105, 118, 62, 10, 9, 9, 60, 47, 100, 105, 118, 62, 10, 100, 105, 115, 112, 108, 97, 121, 58, 32, 110, 111, 110, 101, 59, 34, 62, 34, 32, 47, 62, 10, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 10, 32, 32, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 32, 123, 116, 104, 101, 32, 49, 53, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 46, 112, 114, 101, 118, 101, 110, 116, 68, 101, 102, 97, 117, 108, 116, 40, 108, 97, 114, 103, 101, 32, 110, 117, 109, 98, 101, 114, 32, 111, 102, 32, 66, 121, 122, 97, 110, 116, 105, 110, 101, 32, 69, 109, 112, 105, 114, 101, 46, 106, 112, 103, 124, 116, 104, 117, 109, 98, 124, 108, 101, 102, 116, 124, 118, 97, 115, 116, 32, 109, 97, 106, 111, 114, 105, 116, 121, 32, 111, 102, 109, 97, 106, 111, 114, 105, 116, 121, 32, 111, 102, 32, 116, 104, 101, 32, 32, 97, 108, 105, 103, 110, 61, 34, 99, 101, 110, 116, 101, 114, 34, 62, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 80, 114, 101, 115, 115, 100, 111, 109, 105, 110, 97, 116, 101, 100, 32, 98, 121, 32, 116, 104, 101, 83, 101, 99, 111, 110, 100, 32, 87, 111, 114, 108, 100, 32, 87, 97, 114, 100, 105, 115, 116, 114, 105, 98, 117, 116, 105, 111, 110, 32, 111, 102, 32, 115, 116, 121, 108, 101, 61, 34, 112, 111, 115, 105, 116, 105, 111, 110, 58, 116, 104, 101, 32, 114, 101, 115, 116, 32, 111, 102, 32, 116, 104, 101, 32, 99, 104, 97, 114, 97, 99, 116, 101, 114, 105, 122, 101, 100, 32, 98, 121, 32, 114, 101, 108, 61, 34, 110, 111, 102, 111, 108, 108, 111, 119, 34, 62, 100, 101, 114, 105, 118, 101, 115, 32, 102, 114, 111, 109, 32, 116, 104, 101, 114, 97, 116, 104, 101, 114, 32, 116, 104, 97, 110, 32, 116, 104, 101, 32, 97, 32, 99, 111, 109, 98, 105, 110, 97, 116, 105, 111, 110, 32, 111, 102, 115, 116, 121, 108, 101, 61, 34, 119, 105, 100, 116, 104, 58, 49, 48, 48, 69, 110, 103, 108, 105, 115, 104, 45, 115, 112, 101, 97, 107, 105, 110, 103, 99, 111, 109, 112, 117, 116, 101, 114, 32, 115, 99, 105, 101, 110, 99, 101, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 97, 108, 116, 61, 34, 116, 104, 101, 32, 101, 120, 105, 115, 116, 101, 110, 99, 101, 32, 111, 102, 68, 101, 109, 111, 99, 114, 97, 116, 105, 99, 32, 80, 97, 114, 116, 121, 34, 32, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 45, 70, 111, 114, 32, 116, 104, 105, 115, 32, 114, 101, 97, 115, 111, 110, 44, 46, 106, 115, 34, 62, 60, 47, 115, 99, 
114, 105, 112, 116, 62, 10, 9, 115, 66, 121, 84, 97, 103, 78, 97, 109, 101, 40, 115, 41, 91, 48, 93, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 105, 99, 111, 110, 34, 32, 39, 32, 97, 108, 116, 61, 39, 39, 32, 99, 108, 97, 115, 115, 61, 39, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 118, 101, 114, 115, 105, 111, 110, 115, 32, 111, 102, 32, 116, 104, 101, 32, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 47, 112, 97, 103, 101, 62, 10, 32, 32, 60, 112, 97, 103, 101, 62, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 111, 110, 116, 98, 101, 99, 97, 109, 101, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 98, 97, 104, 97, 115, 97, 32, 73, 110, 100, 111, 110, 101, 115, 105, 97, 101, 110, 103, 108, 105, 115, 104, 32, 40, 115, 105, 109, 112, 108, 101, 41, 206, 149, 206, 187, 206, 187, 206, 183, 206, 189, 206, 185, 206, 186, 206, 172, 209, 133, 209, 128, 208, 178, 208, 176, 209, 130, 209, 129, 208, 186, 208, 184, 208, 186, 208, 190, 208, 188, 208, 191, 208, 176, 208, 189, 208, 184, 208, 184, 209, 143, 208, 178, 208, 187, 209, 143, 208, 181, 209, 130, 209, 129, 209, 143, 208, 148, 208, 190, 208, 177, 208, 176, 208, 178, 208, 184, 209, 130, 209, 140, 209, 135, 208, 181, 208, 187, 208, 190, 208, 178, 208, 181, 208, 186, 208, 176, 209, 128, 208, 176, 208, 183, 208, 178, 208, 184, 209, 130, 208, 184, 209, 143, 208, 152, 208, 189, 209, 130, 208, 181, 209, 128, 208, 189, 208, 181, 209, 130, 208, 158, 209, 130, 208, 178, 208, 181, 209, 130, 208, 184, 209, 130, 209, 140, 208, 189, 208, 176, 208, 191, 209, 128, 208, 184, 208, 188, 208, 181, 209, 128, 208, 184, 208, 189, 209, 130, 208, 181, 209, 128, 208, 189, 208, 181, 209, 130, 208, 186, 208, 190, 209, 130, 208, 190, 209, 128, 208, 190, 208, 179, 208, 190, 209, 129, 209, 130, 209, 128, 208, 176, 208, 189, 208, 184, 209, 134, 209, 139, 208, 186, 208, 176, 209, 135, 208, 181, 209, 129, 209, 130, 208, 178, 208, 181, 209, 131, 209, 129, 208, 187, 208, 190, 208, 178, 208, 184, 209, 143, 209, 133, 208, 191, 209, 128, 208, 190, 208, 177, 208, 187, 208, 181, 208, 188, 209, 139, 208, 191, 208, 190, 208, 187, 209, 131, 209, 135, 208, 184, 209, 130, 209, 140, 209, 143, 208, 178, 208, 187, 209, 143, 209, 142, 209, 130, 209, 129, 209, 143, 208, 189, 208, 176, 208, 184, 208, 177, 208, 190, 208, 187, 208, 181, 208, 181, 208, 186, 208, 190, 208, 188, 208, 191, 208, 176, 208, 189, 208, 184, 209, 143, 208, 178, 208, 189, 208, 184, 208, 188, 208, 176, 208, 189, 208, 184, 208, 181, 209, 129, 209, 128, 208, 181, 208, 180, 209, 129, 209, 130, 208, 178, 208, 176, 216, 167, 217, 132, 217, 133, 217, 136, 216, 167, 216, 182, 217, 138, 216, 185, 216, 167, 217, 132, 216, 177, 216, 166, 217, 138, 216, 179, 217, 138, 216, 169, 216, 167, 217, 132, 216, 167, 217, 134, 216, 170, 217, 130, 216, 167, 217, 132, 217, 133, 216, 180, 216, 167, 216, 177, 217, 131, 216, 167, 216, 170, 217, 131, 216, 167, 217, 132, 216, 179, 217, 138, 216, 167, 216, 177, 216, 167, 216, 170, 216, 167, 217, 132, 217, 133, 217, 131, 216, 170, 217, 136, 216, 168, 216, 169, 216, 167, 217, 132, 216, 179, 216, 185, 217, 136, 216, 175, 217, 138, 216, 169, 216, 167, 216, 173, 216, 181, 216, 167, 216, 166, 217, 138, 216, 167, 216, 170, 216, 167, 217, 132, 216, 185, 216, 167, 217, 132, 217, 133, 217, 138, 216, 169, 216, 167, 217, 132, 216, 181, 217, 136, 216, 170, 217, 138, 216, 167, 216, 170, 216, 167, 217, 
132, 216, 167, 217, 134, 216, 170, 216, 177, 217, 134, 216, 170, 216, 167, 217, 132, 216, 170, 216, 181, 216, 167, 217, 133, 217, 138, 217, 133, 216, 167, 217, 132, 216, 165, 216, 179, 217, 132, 216, 167, 217, 133, 217, 138, 216, 167, 217, 132, 217, 133, 216, 180, 216, 167, 216, 177, 217, 131, 216, 169, 216, 167, 217, 132, 217, 133, 216, 177, 216, 166, 217, 138, 216, 167, 216, 170, 114, 111, 98, 111, 116, 115, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 60, 100, 105, 118, 32, 105, 100, 61, 34, 102, 111, 111, 116, 101, 114, 34, 62, 116, 104, 101, 32, 85, 110, 105, 116, 101, 100, 32, 83, 116, 97, 116, 101, 115, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 46, 106, 112, 103, 124, 114, 105, 103, 104, 116, 124, 116, 104, 117, 109, 98, 124, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 108, 111, 99, 97, 116, 105, 111, 110, 46, 112, 114, 111, 116, 111, 99, 111, 108, 102, 114, 97, 109, 101, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 115, 34, 32, 47, 62, 10, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 102, 111, 110, 116, 45, 119, 101, 105, 103, 104, 116, 58, 98, 111, 108, 100, 59, 38, 113, 117, 111, 116, 59, 32, 97, 110, 100, 32, 38, 113, 117, 111, 116, 59, 100, 101, 112, 101, 110, 100, 105, 110, 103, 32, 111, 110, 32, 116, 104, 101, 32, 109, 97, 114, 103, 105, 110, 58, 48, 59, 112, 97, 100, 100, 105, 110, 103, 58, 34, 32, 114, 101, 108, 61, 34, 110, 111, 102, 111, 108, 108, 111, 119, 34, 32, 80, 114, 101, 115, 105, 100, 101, 110, 116, 32, 111, 102, 32, 116, 104, 101, 32, 116, 119, 101, 110, 116, 105, 101, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 101, 118, 105, 115, 105, 111, 110, 62, 10, 32, 32, 60, 47, 112, 97, 103, 101, 73, 110, 116, 101, 114, 110, 101, 116, 32, 69, 120, 112, 108, 111, 114, 101, 114, 97, 46, 97, 115, 121, 110, 99, 32, 61, 32, 116, 114, 117, 101, 59, 13, 10, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 97, 98, 111, 117, 116, 60, 100, 105, 118, 32, 105, 100, 61, 34, 104, 101, 97, 100, 101, 114, 34, 62, 34, 32, 97, 99, 116, 105, 111, 110, 61, 34, 104, 116, 116, 112, 58, 47, 47, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 115, 58, 47, 47, 60, 100, 105, 118, 32, 105, 100, 61, 34, 99, 111, 110, 116, 101, 110, 116, 34, 60, 47, 100, 105, 118, 62, 13, 10, 60, 47, 100, 105, 118, 62, 13, 10, 60, 100, 101, 114, 105, 118, 101, 100, 32, 102, 114, 111, 109, 32, 116, 104, 101, 32, 60, 105, 109, 103, 32, 115, 114, 99, 61, 39, 104, 116, 116, 112, 58, 47, 47, 97, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 32, 10, 60, 47, 98, 111, 100, 121, 62, 10, 60, 47, 104, 116, 109, 108, 62, 10, 115, 116, 121, 108, 101, 61, 34, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 115, 99, 114, 105, 112, 116, 32, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 65, 114, 105, 97, 108, 44, 32, 72, 101, 108, 118, 101, 116, 105, 99, 97, 44, 60, 47, 97, 62, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 60, 47, 115, 99, 114, 105, 112, 116, 62, 60, 115, 99, 114, 105, 112, 116, 32, 112, 111, 108, 105, 116, 105, 99, 97, 108, 32, 112, 97, 114, 116, 105, 101, 115, 116, 100, 62, 60, 47, 116, 114, 62, 60, 47, 116, 97, 98, 108, 101, 62, 60, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 105, 110, 116, 101, 114, 112, 114, 101, 116, 97, 116, 105, 111, 110, 32, 111, 102, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 100, 111, 99, 117, 109, 
101, 110, 116, 46, 119, 114, 105, 116, 101, 40, 39, 60, 99, 104, 97, 114, 115, 101, 116, 61, 34, 117, 116, 102, 45, 56, 34, 62, 10, 98, 101, 103, 105, 110, 110, 105, 110, 103, 32, 111, 102, 32, 116, 104, 101, 32, 114, 101, 118, 101, 97, 108, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 116, 101, 108, 101, 118, 105, 115, 105, 111, 110, 32, 115, 101, 114, 105, 101, 115, 34, 32, 114, 101, 108, 61, 34, 110, 111, 102, 111, 108, 108, 111, 119, 34, 62, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 62, 99, 108, 97, 105, 109, 105, 110, 103, 32, 116, 104, 97, 116, 32, 116, 104, 101, 104, 116, 116, 112, 37, 51, 65, 37, 50, 70, 37, 50, 70, 119, 119, 119, 46, 109, 97, 110, 105, 102, 101, 115, 116, 97, 116, 105, 111, 110, 115, 32, 111, 102, 80, 114, 105, 109, 101, 32, 77, 105, 110, 105, 115, 116, 101, 114, 32, 111, 102, 105, 110, 102, 108, 117, 101, 110, 99, 101, 100, 32, 98, 121, 32, 116, 104, 101, 99, 108, 97, 115, 115, 61, 34, 99, 108, 101, 97, 114, 102, 105, 120, 34, 62, 47, 100, 105, 118, 62, 13, 10, 60, 47, 100, 105, 118, 62, 13, 10, 13, 10, 116, 104, 114, 101, 101, 45, 100, 105, 109, 101, 110, 115, 105, 111, 110, 97, 108, 67, 104, 117, 114, 99, 104, 32, 111, 102, 32, 69, 110, 103, 108, 97, 110, 100, 111, 102, 32, 78, 111, 114, 116, 104, 32, 67, 97, 114, 111, 108, 105, 110, 97, 115, 113, 117, 97, 114, 101, 32, 107, 105, 108, 111, 109, 101, 116, 114, 101, 115, 46, 97, 100, 100, 69, 118, 101, 110, 116, 76, 105, 115, 116, 101, 110, 101, 114, 100, 105, 115, 116, 105, 110, 99, 116, 32, 102, 114, 111, 109, 32, 116, 104, 101, 99, 111, 109, 109, 111, 110, 108, 121, 32, 107, 110, 111, 119, 110, 32, 97, 115, 80, 104, 111, 110, 101, 116, 105, 99, 32, 65, 108, 112, 104, 97, 98, 101, 116, 100, 101, 99, 108, 97, 114, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 99, 111, 110, 116, 114, 111, 108, 108, 101, 100, 32, 98, 121, 32, 116, 104, 101, 66, 101, 110, 106, 97, 109, 105, 110, 32, 70, 114, 97, 110, 107, 108, 105, 110, 114, 111, 108, 101, 45, 112, 108, 97, 121, 105, 110, 103, 32, 103, 97, 109, 101, 116, 104, 101, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 111, 102, 105, 110, 32, 87, 101, 115, 116, 101, 114, 110, 32, 69, 117, 114, 111, 112, 101, 112, 101, 114, 115, 111, 110, 97, 108, 32, 99, 111, 109, 112, 117, 116, 101, 114, 80, 114, 111, 106, 101, 99, 116, 32, 71, 117, 116, 101, 110, 98, 101, 114, 103, 114, 101, 103, 97, 114, 100, 108, 101, 115, 115, 32, 111, 102, 32, 116, 104, 101, 104, 97, 115, 32, 98, 101, 101, 110, 32, 112, 114, 111, 112, 111, 115, 101, 100, 116, 111, 103, 101, 116, 104, 101, 114, 32, 119, 105, 116, 104, 32, 116, 104, 101, 62, 60, 47, 108, 105, 62, 60, 108, 105, 32, 99, 108, 97, 115, 115, 61, 34, 105, 110, 32, 115, 111, 109, 101, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 109, 105, 110, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 111, 102, 32, 116, 104, 101, 32, 112, 111, 112, 117, 108, 97, 116, 105, 111, 110, 111, 102, 102, 105, 99, 105, 97, 108, 32, 108, 97, 110, 103, 117, 97, 103, 101, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 105, 109, 97, 103, 101, 115, 47, 105, 100, 101, 110, 116, 105, 102, 105, 101, 100, 32, 98, 121, 32, 116, 104, 101, 110, 97, 116, 117, 114, 97, 108, 32, 114, 101, 115, 111, 117, 114, 99, 101, 115, 99, 108, 97, 115, 115, 105, 102, 105, 99, 97, 116, 105, 111, 110, 32, 111, 102, 99, 97, 110, 32, 98, 101, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 113, 117, 97, 110, 116, 117, 109, 32, 109, 101, 99, 104, 97, 110, 105, 99, 115, 78, 101, 118, 101, 114, 116, 104, 101, 108, 101, 115, 
115, 44, 32, 116, 104, 101, 109, 105, 108, 108, 105, 111, 110, 32, 121, 101, 97, 114, 115, 32, 97, 103, 111, 60, 47, 98, 111, 100, 121, 62, 13, 10, 60, 47, 104, 116, 109, 108, 62, 13, 206, 149, 206, 187, 206, 187, 206, 183, 206, 189, 206, 185, 206, 186, 206, 172, 10, 116, 97, 107, 101, 32, 97, 100, 118, 97, 110, 116, 97, 103, 101, 32, 111, 102, 97, 110, 100, 44, 32, 97, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 97, 116, 116, 114, 105, 98, 117, 116, 101, 100, 32, 116, 111, 32, 116, 104, 101, 77, 105, 99, 114, 111, 115, 111, 102, 116, 32, 87, 105, 110, 100, 111, 119, 115, 116, 104, 101, 32, 102, 105, 114, 115, 116, 32, 99, 101, 110, 116, 117, 114, 121, 117, 110, 100, 101, 114, 32, 116, 104, 101, 32, 99, 111, 110, 116, 114, 111, 108, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 104, 101, 97, 100, 101, 114, 115, 104, 111, 114, 116, 108, 121, 32, 97, 102, 116, 101, 114, 32, 116, 104, 101, 110, 111, 116, 97, 98, 108, 101, 32, 101, 120, 99, 101, 112, 116, 105, 111, 110, 116, 101, 110, 115, 32, 111, 102, 32, 116, 104, 111, 117, 115, 97, 110, 100, 115, 115, 101, 118, 101, 114, 97, 108, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 97, 114, 111, 117, 110, 100, 32, 116, 104, 101, 32, 119, 111, 114, 108, 100, 46, 114, 101, 97, 99, 104, 105, 110, 103, 32, 109, 105, 108, 105, 116, 97, 114, 121, 105, 115, 111, 108, 97, 116, 101, 100, 32, 102, 114, 111, 109, 32, 116, 104, 101, 111, 112, 112, 111, 115, 105, 116, 105, 111, 110, 32, 116, 111, 32, 116, 104, 101, 116, 104, 101, 32, 79, 108, 100, 32, 84, 101, 115, 116, 97, 109, 101, 110, 116, 65, 102, 114, 105, 99, 97, 110, 32, 65, 109, 101, 114, 105, 99, 97, 110, 115, 105, 110, 115, 101, 114, 116, 101, 100, 32, 105, 110, 116, 111, 32, 116, 104, 101, 115, 101, 112, 97, 114, 97, 116, 101, 32, 102, 114, 111, 109, 32, 116, 104, 101, 109, 101, 116, 114, 111, 112, 111, 108, 105, 116, 97, 110, 32, 97, 114, 101, 97, 109, 97, 107, 101, 115, 32, 105, 116, 32, 112, 111, 115, 115, 105, 98, 108, 101, 97, 99, 107, 110, 111, 119, 108, 101, 100, 103, 101, 100, 32, 116, 104, 97, 116, 97, 114, 103, 117, 97, 98, 108, 121, 32, 116, 104, 101, 32, 109, 111, 115, 116, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 62, 10, 116, 104, 101, 32, 73, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 65, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 32, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 47, 62, 10, 99, 111, 105, 110, 99, 105, 100, 101, 32, 119, 105, 116, 104, 32, 116, 104, 101, 116, 119, 111, 45, 116, 104, 105, 114, 100, 115, 32, 111, 102, 32, 116, 104, 101, 68, 117, 114, 105, 110, 103, 32, 116, 104, 105, 115, 32, 116, 105, 109, 101, 44, 100, 117, 114, 105, 110, 103, 32, 116, 104, 101, 32, 112, 101, 114, 105, 111, 100, 97, 110, 110, 111, 117, 110, 99, 101, 100, 32, 116, 104, 97, 116, 32, 104, 101, 116, 104, 101, 32, 105, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 97, 110, 100, 32, 109, 111, 114, 101, 32, 114, 101, 99, 101, 110, 116, 108, 121, 98, 101, 108, 105, 101, 118, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 99, 111, 110, 115, 99, 105, 111, 117, 115, 110, 101, 115, 115, 32, 97, 110, 100, 102, 111, 114, 109, 101, 114, 108, 121, 32, 107, 110, 111, 119, 110, 32, 97, 115, 115, 117, 114, 114, 111, 117, 110, 100, 101, 100, 32, 98, 121, 32, 116, 104, 101, 102, 105, 114, 115, 116, 32, 97, 112, 112, 101, 97, 114, 101, 100, 32, 105, 110, 111, 99, 99, 97, 115, 105, 111, 110, 97, 108, 108, 121, 32, 117, 115, 101, 100, 112, 111, 115, 105, 116, 105, 111, 110, 58, 97, 98, 
115, 111, 108, 117, 116, 101, 59, 34, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 32, 112, 111, 115, 105, 116, 105, 111, 110, 58, 114, 101, 108, 97, 116, 105, 118, 101, 59, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 99, 101, 110, 116, 101, 114, 59, 106, 97, 120, 47, 108, 105, 98, 115, 47, 106, 113, 117, 101, 114, 121, 47, 49, 46, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 99, 111, 108, 111, 114, 58, 35, 116, 121, 112, 101, 61, 34, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 97, 110, 103, 117, 97, 103, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 60, 109, 101, 116, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 80, 114, 105, 118, 97, 99, 121, 32, 80, 111, 108, 105, 99, 121, 60, 47, 97, 62, 101, 40, 34, 37, 51, 67, 115, 99, 114, 105, 112, 116, 32, 115, 114, 99, 61, 39, 34, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 62, 79, 110, 32, 116, 104, 101, 32, 111, 116, 104, 101, 114, 32, 104, 97, 110, 100, 44, 46, 106, 112, 103, 124, 116, 104, 117, 109, 98, 124, 114, 105, 103, 104, 116, 124, 50, 60, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 60, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 102, 108, 111, 97, 116, 58, 110, 105, 110, 101, 116, 101, 101, 110, 116, 104, 32, 99, 101, 110, 116, 117, 114, 121, 60, 47, 98, 111, 100, 121, 62, 13, 10, 60, 47, 104, 116, 109, 108, 62, 13, 10, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 115, 59, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 99, 101, 110, 116, 101, 114, 102, 111, 110, 116, 45, 119, 101, 105, 103, 104, 116, 58, 32, 98, 111, 108, 100, 59, 32, 65, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 99, 101, 32, 98, 101, 116, 119, 101, 101, 110, 34, 32, 102, 114, 97, 109, 101, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 34, 32, 115, 116, 121, 108, 101, 61, 34, 112, 111, 115, 105, 116, 105, 111, 110, 58, 108, 105, 110, 107, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 104, 116, 109, 108, 52, 47, 108, 111, 111, 115, 101, 46, 100, 116, 100, 34, 62, 10, 100, 117, 114, 105, 110, 103, 32, 116, 104, 105, 115, 32, 112, 101, 114, 105, 111, 100, 60, 47, 116, 100, 62, 60, 47, 116, 114, 62, 60, 47, 116, 97, 98, 108, 101, 62, 99, 108, 111, 115, 101, 108, 121, 32, 114, 101, 108, 97, 116, 101, 100, 32, 116, 111, 102, 111, 114, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 32, 116, 105, 109, 101, 59, 102, 111, 110, 116, 45, 119, 101, 105, 103, 104, 116, 58, 98, 111, 108, 100, 59, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 60, 115, 112, 97, 110, 32, 115, 116, 121, 108, 101, 61, 34, 102, 111, 110, 116, 45, 111, 110, 114, 101, 97, 100, 121, 115, 116, 97, 116, 101, 99, 104, 97, 110, 103, 101, 9, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 108, 101, 97, 114, 100, 111, 99, 117, 109, 101, 110, 116, 46, 108, 111, 99, 97, 116, 105, 111, 110, 46, 32, 70, 111, 114, 32, 101, 120, 97, 109, 112, 108, 101, 44, 32, 116, 104, 101, 32, 97, 32, 119, 105, 100, 101, 32, 118, 97, 114, 105, 101, 116, 121, 32, 111, 102, 32, 60, 33, 68, 79, 67, 84, 89, 80, 69, 32, 104, 116, 109, 108, 62, 13, 10, 60, 38, 110, 98, 115, 112, 59, 38, 110, 98, 115, 112, 59, 38, 110, 98, 115, 112, 59, 34, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 115, 116, 121, 108, 101, 61, 34, 102, 108, 111, 97, 116, 58, 108, 101, 102, 116, 59, 99, 111, 110, 99, 
101, 114, 110, 101, 100, 32, 119, 105, 116, 104, 32, 116, 104, 101, 61, 104, 116, 116, 112, 37, 51, 65, 37, 50, 70, 37, 50, 70, 119, 119, 119, 46, 105, 110, 32, 112, 111, 112, 117, 108, 97, 114, 32, 99, 117, 108, 116, 117, 114, 101, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 47, 62, 105, 116, 32, 105, 115, 32, 112, 111, 115, 115, 105, 98, 108, 101, 32, 116, 111, 32, 72, 97, 114, 118, 97, 114, 100, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 104, 114, 101, 102, 61, 34, 47, 116, 104, 101, 32, 109, 97, 105, 110, 32, 99, 104, 97, 114, 97, 99, 116, 101, 114, 79, 120, 102, 111, 114, 100, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 32, 110, 97, 109, 101, 61, 34, 107, 101, 121, 119, 111, 114, 100, 115, 34, 32, 99, 115, 116, 121, 108, 101, 61, 34, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 116, 104, 101, 32, 85, 110, 105, 116, 101, 100, 32, 75, 105, 110, 103, 100, 111, 109, 102, 101, 100, 101, 114, 97, 108, 32, 103, 111, 118, 101, 114, 110, 109, 101, 110, 116, 60, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 32, 100, 101, 112, 101, 110, 100, 105, 110, 103, 32, 111, 110, 32, 116, 104, 101, 32, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 104, 101, 97, 100, 101, 114, 46, 109, 105, 110, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 100, 101, 115, 116, 114, 117, 99, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 115, 108, 105, 103, 104, 116, 108, 121, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 105, 110, 32, 97, 99, 99, 111, 114, 100, 97, 110, 99, 101, 32, 119, 105, 116, 104, 116, 101, 108, 101, 99, 111, 109, 109, 117, 110, 105, 99, 97, 116, 105, 111, 110, 115, 105, 110, 100, 105, 99, 97, 116, 101, 115, 32, 116, 104, 97, 116, 32, 116, 104, 101, 115, 104, 111, 114, 116, 108, 121, 32, 116, 104, 101, 114, 101, 97, 102, 116, 101, 114, 101, 115, 112, 101, 99, 105, 97, 108, 108, 121, 32, 105, 110, 32, 116, 104, 101, 32, 69, 117, 114, 111, 112, 101, 97, 110, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 72, 111, 119, 101, 118, 101, 114, 44, 32, 116, 104, 101, 114, 101, 32, 97, 114, 101, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 115, 116, 97, 116, 105, 99, 115, 117, 103, 103, 101, 115, 116, 101, 100, 32, 116, 104, 97, 116, 32, 116, 104, 101, 34, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 97, 32, 108, 97, 114, 103, 101, 32, 110, 117, 109, 98, 101, 114, 32, 111, 102, 32, 84, 101, 108, 101, 99, 111, 109, 109, 117, 110, 105, 99, 97, 116, 105, 111, 110, 115, 34, 32, 114, 101, 108, 61, 34, 110, 111, 102, 111, 108, 108, 111, 119, 34, 32, 116, 72, 111, 108, 121, 32, 82, 111, 109, 97, 110, 32, 69, 109, 112, 101, 114, 111, 114, 97, 108, 109, 111, 115, 116, 32, 101, 120, 99, 108, 117, 115, 105, 118, 101, 108, 121, 34, 32, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 97, 108, 116, 61, 34, 83, 101, 99, 114, 101, 116, 97, 114, 121, 32, 111, 102, 32, 83, 116, 97, 116, 101, 99, 117, 108, 109, 105, 110, 97, 116, 105, 110, 103, 32, 105, 110, 32, 116, 104, 101, 67, 73, 65, 32, 87, 111, 114, 108, 100, 32, 70, 97, 99, 116, 98, 111, 111, 107, 116, 104, 101, 32, 109, 111, 115, 116, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 97, 110, 110, 105, 118, 101, 114, 115, 97, 114, 121, 32, 111, 102, 32, 116, 104, 101, 115, 116, 121, 108, 101, 61, 34, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 60, 108, 105, 
62, 60, 101, 109, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 47, 116, 104, 101, 32, 65, 116, 108, 97, 110, 116, 105, 99, 32, 79, 99, 101, 97, 110, 115, 116, 114, 105, 99, 116, 108, 121, 32, 115, 112, 101, 97, 107, 105, 110, 103, 44, 115, 104, 111, 114, 116, 108, 121, 32, 98, 101, 102, 111, 114, 101, 32, 116, 104, 101, 100, 105, 102, 102, 101, 114, 101, 110, 116, 32, 116, 121, 112, 101, 115, 32, 111, 102, 116, 104, 101, 32, 79, 116, 116, 111, 109, 97, 110, 32, 69, 109, 112, 105, 114, 101, 62, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 65, 110, 32, 73, 110, 116, 114, 111, 100, 117, 99, 116, 105, 111, 110, 32, 116, 111, 99, 111, 110, 115, 101, 113, 117, 101, 110, 99, 101, 32, 111, 102, 32, 116, 104, 101, 100, 101, 112, 97, 114, 116, 117, 114, 101, 32, 102, 114, 111, 109, 32, 116, 104, 101, 67, 111, 110, 102, 101, 100, 101, 114, 97, 116, 101, 32, 83, 116, 97, 116, 101, 115, 105, 110, 100, 105, 103, 101, 110, 111, 117, 115, 32, 112, 101, 111, 112, 108, 101, 115, 80, 114, 111, 99, 101, 101, 100, 105, 110, 103, 115, 32, 111, 102, 32, 116, 104, 101, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 32, 111, 110, 32, 116, 104, 101, 116, 104, 101, 111, 114, 105, 101, 115, 32, 104, 97, 118, 101, 32, 98, 101, 101, 110, 105, 110, 118, 111, 108, 118, 101, 109, 101, 110, 116, 32, 105, 110, 32, 116, 104, 101, 100, 105, 118, 105, 100, 101, 100, 32, 105, 110, 116, 111, 32, 116, 104, 114, 101, 101, 97, 100, 106, 97, 99, 101, 110, 116, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 105, 115, 32, 114, 101, 115, 112, 111, 110, 115, 105, 98, 108, 101, 32, 102, 111, 114, 100, 105, 115, 115, 111, 108, 117, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 99, 111, 108, 108, 97, 98, 111, 114, 97, 116, 105, 111, 110, 32, 119, 105, 116, 104, 119, 105, 100, 101, 108, 121, 32, 114, 101, 103, 97, 114, 100, 101, 100, 32, 97, 115, 104, 105, 115, 32, 99, 111, 110, 116, 101, 109, 112, 111, 114, 97, 114, 105, 101, 115, 102, 111, 117, 110, 100, 105, 110, 103, 32, 109, 101, 109, 98, 101, 114, 32, 111, 102, 68, 111, 109, 105, 110, 105, 99, 97, 110, 32, 82, 101, 112, 117, 98, 108, 105, 99, 103, 101, 110, 101, 114, 97, 108, 108, 121, 32, 97, 99, 99, 101, 112, 116, 101, 100, 116, 104, 101, 32, 112, 111, 115, 115, 105, 98, 105, 108, 105, 116, 121, 32, 111, 102, 97, 114, 101, 32, 97, 108, 115, 111, 32, 97, 118, 97, 105, 108, 97, 98, 108, 101, 117, 110, 100, 101, 114, 32, 99, 111, 110, 115, 116, 114, 117, 99, 116, 105, 111, 110, 114, 101, 115, 116, 111, 114, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 116, 104, 101, 32, 103, 101, 110, 101, 114, 97, 108, 32, 112, 117, 98, 108, 105, 99, 105, 115, 32, 97, 108, 109, 111, 115, 116, 32, 101, 110, 116, 105, 114, 101, 108, 121, 112, 97, 115, 115, 101, 115, 32, 116, 104, 114, 111, 117, 103, 104, 32, 116, 104, 101, 104, 97, 115, 32, 98, 101, 101, 110, 32, 115, 117, 103, 103, 101, 115, 116, 101, 100, 99, 111, 109, 112, 117, 116, 101, 114, 32, 97, 110, 100, 32, 118, 105, 100, 101, 111, 71, 101, 114, 109, 97, 110, 105, 99, 32, 108, 97, 110, 103, 117, 97, 103, 101, 115, 32, 97, 99, 99, 111, 114, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 32, 102, 114, 111, 109, 32, 116, 104, 101, 115, 104, 111, 114, 116, 108, 121, 32, 97, 102, 116, 101, 114, 119, 97, 114, 100, 115, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 115, 58, 47, 47, 119, 119, 119, 46, 114, 101, 99, 101, 110, 116, 32, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 66, 111, 97, 114, 100, 32, 111, 102, 32, 68, 105, 
114, 101, 99, 116, 111, 114, 115, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 115, 101, 97, 114, 99, 104, 124, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 73, 110, 32, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 44, 32, 116, 104, 101, 77, 117, 108, 116, 105, 112, 108, 101, 32, 102, 111, 111, 116, 110, 111, 116, 101, 115, 111, 114, 32, 111, 116, 104, 101, 114, 32, 115, 117, 98, 115, 116, 97, 110, 99, 101, 116, 104, 111, 117, 115, 97, 110, 100, 115, 32, 111, 102, 32, 121, 101, 97, 114, 115, 116, 114, 97, 110, 115, 108, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 60, 47, 100, 105, 118, 62, 13, 10, 60, 47, 100, 105, 118, 62, 13, 10, 13, 10, 60, 97, 32, 104, 114, 101, 102, 61, 34, 105, 110, 100, 101, 120, 46, 112, 104, 112, 119, 97, 115, 32, 101, 115, 116, 97, 98, 108, 105, 115, 104, 101, 100, 32, 105, 110, 109, 105, 110, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 112, 97, 114, 116, 105, 99, 105, 112, 97, 116, 101, 32, 105, 110, 32, 116, 104, 101, 97, 32, 115, 116, 114, 111, 110, 103, 32, 105, 110, 102, 108, 117, 101, 110, 99, 101, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 45, 116, 111, 112, 58, 114, 101, 112, 114, 101, 115, 101, 110, 116, 101, 100, 32, 98, 121, 32, 116, 104, 101, 103, 114, 97, 100, 117, 97, 116, 101, 100, 32, 102, 114, 111, 109, 32, 116, 104, 101, 84, 114, 97, 100, 105, 116, 105, 111, 110, 97, 108, 108, 121, 44, 32, 116, 104, 101, 69, 108, 101, 109, 101, 110, 116, 40, 34, 115, 99, 114, 105, 112, 116, 34, 41, 59, 72, 111, 119, 101, 118, 101, 114, 44, 32, 115, 105, 110, 99, 101, 32, 116, 104, 101, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 10, 60, 100, 105, 118, 32, 108, 101, 102, 116, 59, 32, 109, 97, 114, 103, 105, 110, 45, 108, 101, 102, 116, 58, 112, 114, 111, 116, 101, 99, 116, 105, 111, 110, 32, 97, 103, 97, 105, 110, 115, 116, 48, 59, 32, 118, 101, 114, 116, 105, 99, 97, 108, 45, 97, 108, 105, 103, 110, 58, 85, 110, 102, 111, 114, 116, 117, 110, 97, 116, 101, 108, 121, 44, 32, 116, 104, 101, 116, 121, 112, 101, 61, 34, 105, 109, 97, 103, 101, 47, 120, 45, 105, 99, 111, 110, 47, 100, 105, 118, 62, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 32, 99, 108, 97, 115, 115, 61, 34, 99, 108, 101, 97, 114, 102, 105, 120, 34, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 102, 111, 111, 116, 101, 114, 9, 9, 60, 47, 100, 105, 118, 62, 10, 9, 9, 60, 47, 100, 105, 118, 62, 10, 116, 104, 101, 32, 109, 111, 116, 105, 111, 110, 32, 112, 105, 99, 116, 117, 114, 101, 208, 145, 209, 138, 208, 187, 208, 179, 208, 176, 209, 128, 209, 129, 208, 186, 208, 184, 208, 177, 209, 138, 208, 187, 208, 179, 208, 176, 209, 128, 209, 129, 208, 186, 208, 184, 208, 164, 208, 181, 208, 180, 208, 181, 209, 128, 208, 176, 209, 134, 208, 184, 208, 184, 208, 189, 208, 181, 209, 129, 208, 186, 208, 190, 208, 187, 209, 140, 208, 186, 208, 190, 209, 129, 208, 190, 208, 190, 208, 177, 209, 137, 208, 181, 208, 189, 208, 184, 208, 181, 209, 129, 208, 190, 208, 190, 208, 177, 209, 137, 208, 181, 208, 189, 208, 184, 209, 143, 208, 191, 209, 128, 208, 190, 208, 179, 209, 128, 208, 176, 208, 188, 208, 188, 209, 139, 208, 158, 209, 130, 208, 191, 209, 128, 208, 176, 208, 178, 208, 184, 209, 130, 209, 140, 208, 177, 208, 181, 209, 129, 208, 191, 208, 187, 208, 176, 209, 130, 208, 189, 208, 190, 208, 188, 208, 176, 209, 130, 208, 181, 209, 128, 208, 184, 208, 176, 208, 187, 209, 139, 208, 191, 208, 190, 208, 183, 208, 178, 208, 190, 208, 187, 209, 143, 208, 181, 209, 130, 208, 191, 208, 
190, 209, 129, 208, 187, 208, 181, 208, 180, 208, 189, 208, 184, 208, 181, 209, 128, 208, 176, 208, 183, 208, 187, 208, 184, 209, 135, 208, 189, 209, 139, 209, 133, 208, 191, 209, 128, 208, 190, 208, 180, 209, 131, 208, 186, 209, 134, 208, 184, 208, 184, 208, 191, 209, 128, 208, 190, 208, 179, 209, 128, 208, 176, 208, 188, 208, 188, 208, 176, 208, 191, 208, 190, 208, 187, 208, 189, 208, 190, 209, 129, 209, 130, 209, 140, 209, 142, 208, 189, 208, 176, 209, 133, 208, 190, 208, 180, 208, 184, 209, 130, 209, 129, 209, 143, 208, 184, 208, 183, 208, 177, 209, 128, 208, 176, 208, 189, 208, 189, 208, 190, 208, 181, 208, 189, 208, 176, 209, 129, 208, 181, 208, 187, 208, 181, 208, 189, 208, 184, 209, 143, 208, 184, 208, 183, 208, 188, 208, 181, 208, 189, 208, 181, 208, 189, 208, 184, 209, 143, 208, 186, 208, 176, 209, 130, 208, 181, 208, 179, 208, 190, 209, 128, 208, 184, 208, 184, 208, 144, 208, 187, 208, 181, 208, 186, 209, 129, 208, 176, 208, 189, 208, 180, 209, 128, 224, 164, 166, 224, 165, 141, 224, 164, 181, 224, 164, 190, 224, 164, 176, 224, 164, 190, 224, 164, 174, 224, 165, 136, 224, 164, 168, 224, 165, 129, 224, 164, 133, 224, 164, 178, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 166, 224, 164, 190, 224, 164, 168, 224, 164, 173, 224, 164, 190, 224, 164, 176, 224, 164, 164, 224, 165, 128, 224, 164, 175, 224, 164, 133, 224, 164, 168, 224, 165, 129, 224, 164, 166, 224, 165, 135, 224, 164, 182, 224, 164, 185, 224, 164, 191, 224, 164, 168, 224, 165, 141, 224, 164, 166, 224, 165, 128, 224, 164, 135, 224, 164, 130, 224, 164, 161, 224, 164, 191, 224, 164, 175, 224, 164, 190, 224, 164, 166, 224, 164, 191, 224, 164, 178, 224, 165, 141, 224, 164, 178, 224, 165, 128, 224, 164, 133, 224, 164, 167, 224, 164, 191, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 164, 181, 224, 165, 128, 224, 164, 161, 224, 164, 191, 224, 164, 175, 224, 165, 139, 224, 164, 154, 224, 164, 191, 224, 164, 159, 224, 165, 141, 224, 164, 160, 224, 165, 135, 224, 164, 184, 224, 164, 174, 224, 164, 190, 224, 164, 154, 224, 164, 190, 224, 164, 176, 224, 164, 156, 224, 164, 130, 224, 164, 149, 224, 165, 141, 224, 164, 182, 224, 164, 168, 224, 164, 166, 224, 165, 129, 224, 164, 168, 224, 164, 191, 224, 164, 175, 224, 164, 190, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 175, 224, 165, 139, 224, 164, 151, 224, 164, 133, 224, 164, 168, 224, 165, 129, 224, 164, 184, 224, 164, 190, 224, 164, 176, 224, 164, 145, 224, 164, 168, 224, 164, 178, 224, 164, 190, 224, 164, 135, 224, 164, 168, 224, 164, 170, 224, 164, 190, 224, 164, 176, 224, 165, 141, 224, 164, 159, 224, 165, 128, 224, 164, 182, 224, 164, 176, 224, 165, 141, 224, 164, 164, 224, 165, 139, 224, 164, 130, 224, 164, 178, 224, 165, 139, 224, 164, 149, 224, 164, 184, 224, 164, 173, 224, 164, 190, 224, 164, 171, 224, 164, 188, 224, 165, 141, 224, 164, 178, 224, 165, 136, 224, 164, 182, 224, 164, 182, 224, 164, 176, 224, 165, 141, 224, 164, 164, 224, 165, 135, 224, 164, 130, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 166, 224, 165, 135, 224, 164, 182, 224, 164, 170, 224, 165, 141, 224, 164, 178, 224, 165, 135, 224, 164, 175, 224, 164, 176, 224, 164, 149, 224, 165, 135, 224, 164, 130, 224, 164, 166, 224, 165, 141, 224, 164, 176, 224, 164, 184, 224, 165, 141, 224, 164, 165, 224, 164, 191, 224, 164, 164, 224, 164, 191, 224, 164, 137, 224, 164, 164, 224, 165, 141, 224, 164, 170, 224, 164, 190, 224, 164, 166, 224, 164, 137, 224, 164, 168, 224, 165, 141, 224, 164, 185, 224, 165, 135, 224, 164, 130, 224, 164, 154, 224, 164, 191, 224, 164, 159, 224, 165, 141, 
224, 164, 160, 224, 164, 190, 224, 164, 175, 224, 164, 190, 224, 164, 164, 224, 165, 141, 224, 164, 176, 224, 164, 190, 224, 164, 156, 224, 165, 141, 224, 164, 175, 224, 164, 190, 224, 164, 166, 224, 164, 190, 224, 164, 170, 224, 165, 129, 224, 164, 176, 224, 164, 190, 224, 164, 168, 224, 165, 135, 224, 164, 156, 224, 165, 139, 224, 164, 161, 224, 164, 188, 224, 165, 135, 224, 164, 130, 224, 164, 133, 224, 164, 168, 224, 165, 129, 224, 164, 181, 224, 164, 190, 224, 164, 166, 224, 164, 182, 224, 165, 141, 224, 164, 176, 224, 165, 135, 224, 164, 163, 224, 165, 128, 224, 164, 182, 224, 164, 191, 224, 164, 149, 224, 165, 141, 224, 164, 183, 224, 164, 190, 224, 164, 184, 224, 164, 176, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 165, 128, 224, 164, 184, 224, 164, 130, 224, 164, 151, 224, 165, 141, 224, 164, 176, 224, 164, 185, 224, 164, 170, 224, 164, 176, 224, 164, 191, 224, 164, 163, 224, 164, 190, 224, 164, 174, 224, 164, 172, 224, 165, 141, 224, 164, 176, 224, 164, 190, 224, 164, 130, 224, 164, 161, 224, 164, 172, 224, 164, 154, 224, 165, 141, 224, 164, 154, 224, 165, 139, 224, 164, 130, 224, 164, 137, 224, 164, 170, 224, 164, 178, 224, 164, 172, 224, 165, 141, 224, 164, 167, 224, 164, 174, 224, 164, 130, 224, 164, 164, 224, 165, 141, 224, 164, 176, 224, 165, 128, 224, 164, 184, 224, 164, 130, 224, 164, 170, 224, 164, 176, 224, 165, 141, 224, 164, 149, 224, 164, 137, 224, 164, 174, 224, 165, 141, 224, 164, 174, 224, 165, 128, 224, 164, 166, 224, 164, 174, 224, 164, 190, 224, 164, 167, 224, 165, 141, 224, 164, 175, 224, 164, 174, 224, 164, 184, 224, 164, 185, 224, 164, 190, 224, 164, 175, 224, 164, 164, 224, 164, 190, 224, 164, 182, 224, 164, 172, 224, 165, 141, 224, 164, 166, 224, 165, 139, 224, 164, 130, 224, 164, 174, 224, 165, 128, 224, 164, 161, 224, 164, 191, 224, 164, 175, 224, 164, 190, 224, 164, 134, 224, 164, 136, 224, 164, 170, 224, 165, 128, 224, 164, 143, 224, 164, 178, 224, 164, 174, 224, 165, 139, 224, 164, 172, 224, 164, 190, 224, 164, 135, 224, 164, 178, 224, 164, 184, 224, 164, 130, 224, 164, 150, 224, 165, 141, 224, 164, 175, 224, 164, 190, 224, 164, 134, 224, 164, 170, 224, 164, 176, 224, 165, 135, 224, 164, 182, 224, 164, 168, 224, 164, 133, 224, 164, 168, 224, 165, 129, 224, 164, 172, 224, 164, 130, 224, 164, 167, 224, 164, 172, 224, 164, 190, 224, 164, 156, 224, 164, 188, 224, 164, 190, 224, 164, 176, 224, 164, 168, 224, 164, 181, 224, 165, 128, 224, 164, 168, 224, 164, 164, 224, 164, 174, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 174, 224, 165, 129, 224, 164, 150, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 182, 224, 165, 141, 224, 164, 168, 224, 164, 170, 224, 164, 176, 224, 164, 191, 224, 164, 181, 224, 164, 190, 224, 164, 176, 224, 164, 168, 224, 165, 129, 224, 164, 149, 224, 164, 184, 224, 164, 190, 224, 164, 168, 224, 164, 184, 224, 164, 174, 224, 164, 176, 224, 165, 141, 224, 164, 165, 224, 164, 168, 224, 164, 134, 224, 164, 175, 224, 165, 139, 224, 164, 156, 224, 164, 191, 224, 164, 164, 224, 164, 184, 224, 165, 139, 224, 164, 174, 224, 164, 181, 224, 164, 190, 224, 164, 176, 216, 167, 217, 132, 217, 133, 216, 180, 216, 167, 216, 177, 217, 131, 216, 167, 216, 170, 216, 167, 217, 132, 217, 133, 217, 134, 216, 170, 216, 175, 217, 138, 216, 167, 216, 170, 216, 167, 217, 132, 217, 131, 217, 133, 216, 168, 217, 138, 217, 136, 216, 170, 216, 177, 216, 167, 217, 132, 217, 133, 216, 180, 216, 167, 217, 135, 216, 175, 216, 167, 216, 170, 216, 185, 216, 175, 216, 175, 216, 167, 217, 132, 216, 178, 217, 136, 216, 167, 216, 177, 216, 185, 216, 
175, 216, 175, 216, 167, 217, 132, 216, 177, 216, 175, 217, 136, 216, 175, 216, 167, 217, 132, 216, 165, 216, 179, 217, 132, 216, 167, 217, 133, 217, 138, 216, 169, 216, 167, 217, 132, 217, 129, 217, 136, 216, 170, 217, 136, 216, 180, 217, 136, 216, 168, 216, 167, 217, 132, 217, 133, 216, 179, 216, 167, 216, 168, 217, 130, 216, 167, 216, 170, 216, 167, 217, 132, 217, 133, 216, 185, 217, 132, 217, 136, 217, 133, 216, 167, 216, 170, 216, 167, 217, 132, 217, 133, 216, 179, 217, 132, 216, 179, 217, 132, 216, 167, 216, 170, 216, 167, 217, 132, 216, 172, 216, 177, 216, 167, 217, 129, 217, 138, 217, 131, 216, 179, 216, 167, 217, 132, 216, 167, 216, 179, 217, 132, 216, 167, 217, 133, 217, 138, 216, 169, 216, 167, 217, 132, 216, 167, 216, 170, 216, 181, 216, 167, 217, 132, 216, 167, 216, 170, 107, 101, 121, 119, 111, 114, 100, 115, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 119, 51, 46, 111, 114, 103, 47, 49, 57, 57, 57, 47, 120, 104, 116, 109, 108, 34, 62, 60, 97, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 32, 116, 101, 120, 116, 47, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 34, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 62, 60, 116, 97, 98, 108, 101, 32, 99, 101, 108, 108, 112, 97, 100, 100, 105, 110, 103, 61, 34, 97, 117, 116, 111, 99, 111, 109, 112, 108, 101, 116, 101, 61, 34, 111, 102, 102, 34, 32, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 32, 99, 101, 110, 116, 101, 114, 59, 116, 111, 32, 108, 97, 115, 116, 32, 118, 101, 114, 115, 105, 111, 110, 32, 98, 121, 32, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 99, 111, 108, 111, 114, 58, 32, 35, 34, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 105, 100, 61, 60, 97, 32, 104, 114, 101, 102, 61, 34, 35, 34, 32, 99, 108, 97, 115, 115, 61, 34, 34, 62, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 99, 114, 105, 112, 116, 34, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 10, 60, 115, 99, 114, 105, 112, 116, 32, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 47, 47, 69, 78, 34, 32, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 119, 101, 110, 99, 111, 100, 101, 85, 82, 73, 67, 111, 109, 112, 111, 110, 101, 110, 116, 40, 34, 32, 104, 114, 101, 102, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 58, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 99, 111, 110, 116, 101, 110, 116, 100, 111, 99, 117, 109, 101, 110, 116, 46, 119, 114, 105, 116, 101, 40, 39, 60, 115, 99, 112, 111, 115, 105, 116, 105, 111, 110, 58, 32, 97, 98, 115, 111, 108, 117, 116, 101, 59, 115, 99, 114, 105, 112, 116, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 32, 115, 116, 121, 108, 101, 61, 34, 109, 97, 114, 103, 105, 110, 45, 116, 111, 112, 58, 46, 109, 105, 110, 46, 106, 115, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 47, 100, 105, 118, 62, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 119, 51, 46, 111, 114, 103, 47, 49, 57, 57, 57, 47, 120, 104, 116, 109, 108, 34, 32, 10, 13, 10, 60, 47, 98, 111, 100, 121, 62, 13, 10, 60, 47, 104, 116, 109, 108, 62, 100, 105, 115, 116, 105, 110, 99, 116, 105, 111, 110, 32, 98, 101, 116, 119, 101, 101, 110, 47, 34, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 62, 60, 108, 105, 110, 107, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 101, 110, 99, 111, 100, 105, 110, 103, 61, 34, 117, 116, 102, 
45, 56, 34, 63, 62, 10, 119, 46, 97, 100, 100, 69, 118, 101, 110, 116, 76, 105, 115, 116, 101, 110, 101, 114, 63, 97, 99, 116, 105, 111, 110, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 105, 99, 111, 110, 34, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 32, 115, 116, 121, 108, 101, 61, 34, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 58, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 47, 62, 10, 109, 101, 116, 97, 32, 112, 114, 111, 112, 101, 114, 116, 121, 61, 34, 111, 103, 58, 116, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 32, 115, 116, 121, 108, 101, 61, 34, 116, 101, 120, 116, 45, 97, 108, 105, 103, 110, 58, 116, 104, 101, 32, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 32, 111, 102, 32, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 105, 115, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 32, 116, 111, 32, 98, 101, 116, 97, 98, 108, 101, 32, 119, 105, 100, 116, 104, 61, 34, 49, 48, 48, 37, 34, 32, 73, 110, 32, 97, 100, 100, 105, 116, 105, 111, 110, 32, 116, 111, 32, 116, 104, 101, 32, 99, 111, 110, 116, 114, 105, 98, 117, 116, 101, 100, 32, 116, 111, 32, 116, 104, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 99, 101, 115, 32, 98, 101, 116, 119, 101, 101, 110, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 32, 111, 102, 32, 116, 104, 101, 32, 73, 116, 32, 105, 115, 32, 105, 109, 112, 111, 114, 116, 97, 110, 116, 32, 116, 111, 32, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 10, 60, 115, 99, 114, 105, 112, 116, 32, 32, 115, 116, 121, 108, 101, 61, 34, 102, 111, 110, 116, 45, 115, 105, 122, 101, 58, 49, 62, 60, 47, 115, 112, 97, 110, 62, 60, 115, 112, 97, 110, 32, 105, 100, 61, 103, 98, 76, 105, 98, 114, 97, 114, 121, 32, 111, 102, 32, 67, 111, 110, 103, 114, 101, 115, 115, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 105, 109, 69, 110, 103, 108, 105, 115, 104, 32, 116, 114, 97, 110, 115, 108, 97, 116, 105, 111, 110, 65, 99, 97, 100, 101, 109, 121, 32, 111, 102, 32, 83, 99, 105, 101, 110, 99, 101, 115, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 99, 111, 110, 115, 116, 114, 117, 99, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 66, 121, 73, 100, 40, 105, 100, 41, 105, 110, 32, 99, 111, 110, 106, 117, 110, 99, 116, 105, 111, 110, 32, 119, 105, 116, 104, 69, 108, 101, 109, 101, 110, 116, 40, 39, 115, 99, 114, 105, 112, 116, 39, 41, 59, 32, 60, 109, 101, 116, 97, 32, 112, 114, 111, 112, 101, 114, 116, 121, 61, 34, 111, 103, 58, 208, 145, 209, 138, 208, 187, 208, 179, 208, 176, 209, 128, 209, 129, 208, 186, 208, 184, 10, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 110, 97, 109, 101, 61, 34, 62, 80, 114, 105, 118, 97, 99, 121, 32, 80, 111, 108, 105, 99, 121, 60, 47, 97, 62, 97, 100, 109, 105, 110, 105, 115, 116, 101, 114, 101, 100, 32, 98, 121, 32, 116, 104, 101, 101, 110, 97, 98, 108, 101, 83, 105, 110, 103, 108, 101, 82, 101, 113, 117, 101, 115, 116, 115, 116, 121, 108, 101, 61, 38, 113, 117, 111, 116, 59, 109, 97, 114, 103, 105, 110, 58, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 62, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 105, 32, 115, 116, 121, 108, 101, 61, 38, 113, 117, 111, 116, 59, 102, 
108, 111, 97, 116, 58, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 32, 97, 115, 32, 116, 104, 101, 32, 116, 111, 116, 97, 108, 32, 112, 111, 112, 117, 108, 97, 116, 105, 111, 110, 32, 111, 102, 105, 110, 32, 87, 97, 115, 104, 105, 110, 103, 116, 111, 110, 44, 32, 68, 46, 67, 46, 32, 115, 116, 121, 108, 101, 61, 34, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 97, 109, 111, 110, 103, 32, 111, 116, 104, 101, 114, 32, 116, 104, 105, 110, 103, 115, 44, 111, 114, 103, 97, 110, 105, 122, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 112, 97, 114, 116, 105, 99, 105, 112, 97, 116, 101, 100, 32, 105, 110, 32, 116, 104, 101, 116, 104, 101, 32, 105, 110, 116, 114, 111, 100, 117, 99, 116, 105, 111, 110, 32, 111, 102, 105, 100, 101, 110, 116, 105, 102, 105, 101, 100, 32, 119, 105, 116, 104, 32, 116, 104, 101, 102, 105, 99, 116, 105, 111, 110, 97, 108, 32, 99, 104, 97, 114, 97, 99, 116, 101, 114, 32, 79, 120, 102, 111, 114, 100, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 32, 109, 105, 115, 117, 110, 100, 101, 114, 115, 116, 97, 110, 100, 105, 110, 103, 32, 111, 102, 84, 104, 101, 114, 101, 32, 97, 114, 101, 44, 32, 104, 111, 119, 101, 118, 101, 114, 44, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 104, 114, 101, 102, 61, 34, 47, 67, 111, 108, 117, 109, 98, 105, 97, 32, 85, 110, 105, 118, 101, 114, 115, 105, 116, 121, 101, 120, 112, 97, 110, 100, 101, 100, 32, 116, 111, 32, 105, 110, 99, 108, 117, 100, 101, 117, 115, 117, 97, 108, 108, 121, 32, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 105, 110, 100, 105, 99, 97, 116, 105, 110, 103, 32, 116, 104, 97, 116, 32, 116, 104, 101, 104, 97, 118, 101, 32, 115, 117, 103, 103, 101, 115, 116, 101, 100, 32, 116, 104, 97, 116, 97, 102, 102, 105, 108, 105, 97, 116, 101, 100, 32, 119, 105, 116, 104, 32, 116, 104, 101, 99, 111, 114, 114, 101, 108, 97, 116, 105, 111, 110, 32, 98, 101, 116, 119, 101, 101, 110, 110, 117, 109, 98, 101, 114, 32, 111, 102, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 62, 60, 47, 116, 100, 62, 60, 47, 116, 114, 62, 60, 47, 116, 97, 98, 108, 101, 62, 82, 101, 112, 117, 98, 108, 105, 99, 32, 111, 102, 32, 73, 114, 101, 108, 97, 110, 100, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 115, 99, 114, 105, 112, 116, 32, 117, 110, 100, 101, 114, 32, 116, 104, 101, 32, 105, 110, 102, 108, 117, 101, 110, 99, 101, 99, 111, 110, 116, 114, 105, 98, 117, 116, 105, 111, 110, 32, 116, 111, 32, 116, 104, 101, 79, 102, 102, 105, 99, 105, 97, 108, 32, 119, 101, 98, 115, 105, 116, 101, 32, 111, 102, 104, 101, 97, 100, 113, 117, 97, 114, 116, 101, 114, 115, 32, 111, 102, 32, 116, 104, 101, 99, 101, 110, 116, 101, 114, 101, 100, 32, 97, 114, 111, 117, 110, 100, 32, 116, 104, 101, 105, 109, 112, 108, 105, 99, 97, 116, 105, 111, 110, 115, 32, 111, 102, 32, 116, 104, 101, 104, 97, 118, 101, 32, 98, 101, 101, 110, 32, 100, 101, 118, 101, 108, 111, 112, 101, 100, 70, 101, 100, 101, 114, 97, 108, 32, 82, 101, 112, 117, 98, 108, 105, 99, 32, 111, 102, 98, 101, 99, 97, 109, 101, 32, 105, 110, 99, 114, 101, 97, 115, 105, 110, 103, 108, 121, 99, 111, 110, 116, 105, 110, 117, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 78, 111, 116, 101, 44, 32, 104, 111, 119, 101, 118, 101, 114, 44, 32, 116, 104, 97, 116, 115, 105, 109, 105, 108, 97, 114, 32, 116, 111, 32, 116, 104, 97, 116, 32, 111, 102, 32, 99, 97, 112, 97, 98, 105, 108, 105, 116, 105, 101, 115, 32, 111, 102, 32, 116, 104, 101, 97, 99, 99, 111, 114, 100, 97, 110, 99, 101, 32, 119, 105, 116, 104, 32, 116, 104, 101, 112, 97, 114, 116, 105, 
99, 105, 112, 97, 110, 116, 115, 32, 105, 110, 32, 116, 104, 101, 102, 117, 114, 116, 104, 101, 114, 32, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 117, 110, 100, 101, 114, 32, 116, 104, 101, 32, 100, 105, 114, 101, 99, 116, 105, 111, 110, 105, 115, 32, 111, 102, 116, 101, 110, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 104, 105, 115, 32, 121, 111, 117, 110, 103, 101, 114, 32, 98, 114, 111, 116, 104, 101, 114, 60, 47, 116, 100, 62, 60, 47, 116, 114, 62, 60, 47, 116, 97, 98, 108, 101, 62, 60, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 88, 45, 85, 65, 45, 112, 104, 121, 115, 105, 99, 97, 108, 32, 112, 114, 111, 112, 101, 114, 116, 105, 101, 115, 111, 102, 32, 66, 114, 105, 116, 105, 115, 104, 32, 67, 111, 108, 117, 109, 98, 105, 97, 104, 97, 115, 32, 98, 101, 101, 110, 32, 99, 114, 105, 116, 105, 99, 105, 122, 101, 100, 40, 119, 105, 116, 104, 32, 116, 104, 101, 32, 101, 120, 99, 101, 112, 116, 105, 111, 110, 113, 117, 101, 115, 116, 105, 111, 110, 115, 32, 97, 98, 111, 117, 116, 32, 116, 104, 101, 112, 97, 115, 115, 105, 110, 103, 32, 116, 104, 114, 111, 117, 103, 104, 32, 116, 104, 101, 48, 34, 32, 99, 101, 108, 108, 112, 97, 100, 100, 105, 110, 103, 61, 34, 48, 34, 32, 116, 104, 111, 117, 115, 97, 110, 100, 115, 32, 111, 102, 32, 112, 101, 111, 112, 108, 101, 114, 101, 100, 105, 114, 101, 99, 116, 115, 32, 104, 101, 114, 101, 46, 32, 70, 111, 114, 104, 97, 118, 101, 32, 99, 104, 105, 108, 100, 114, 101, 110, 32, 117, 110, 100, 101, 114, 37, 51, 69, 37, 51, 67, 47, 115, 99, 114, 105, 112, 116, 37, 51, 69, 34, 41, 41, 59, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 115, 105, 116, 101, 95, 110, 97, 109, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 116, 101, 120, 116, 45, 100, 101, 99, 111, 114, 97, 116, 105, 111, 110, 58, 110, 111, 110, 101, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 32, 110, 111, 110, 101, 60, 109, 101, 116, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 88, 45, 110, 101, 119, 32, 68, 97, 116, 101, 40, 41, 46, 103, 101, 116, 84, 105, 109, 101, 40, 41, 32, 116, 121, 112, 101, 61, 34, 105, 109, 97, 103, 101, 47, 120, 45, 105, 99, 111, 110, 34, 60, 47, 115, 112, 97, 110, 62, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 119, 105, 110, 100, 111, 119, 46, 108, 111, 99, 97, 116, 105, 111, 110, 46, 104, 114, 101, 102, 60, 97, 32, 104, 114, 101, 102, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 58, 45, 45, 62, 13, 10, 60, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 34, 116, 60, 97, 32, 104, 114, 101, 102, 61, 39, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 104, 111, 114, 116, 99, 117, 116, 32, 105, 99, 111, 110, 34, 32, 104, 114, 101, 102, 61, 34, 60, 47, 100, 105, 118, 62, 13, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 60, 115, 99, 114, 105, 112, 116, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 34, 32, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 116, 60, 47, 100, 105, 118, 62, 10, 60, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 47, 97, 62, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 32, 97, 108, 108, 111, 119, 84, 114, 97, 110, 115, 112, 97, 114, 101, 110, 99, 121, 61, 34, 88, 45, 85, 65, 45, 67, 111, 109, 
112, 97, 116, 105, 98, 108, 101, 34, 32, 99, 111, 110, 114, 101, 108, 97, 116, 105, 111, 110, 115, 104, 105, 112, 32, 98, 101, 116, 119, 101, 101, 110, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 115, 99, 114, 105, 112, 116, 32, 60, 47, 97, 62, 60, 47, 108, 105, 62, 60, 47, 117, 108, 62, 60, 47, 100, 105, 118, 62, 97, 115, 115, 111, 99, 105, 97, 116, 101, 100, 32, 119, 105, 116, 104, 32, 116, 104, 101, 32, 112, 114, 111, 103, 114, 97, 109, 109, 105, 110, 103, 32, 108, 97, 110, 103, 117, 97, 103, 101, 60, 47, 97, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 60, 47, 97, 62, 60, 47, 108, 105, 62, 60, 108, 105, 32, 99, 108, 97, 115, 115, 61, 34, 102, 111, 114, 109, 32, 97, 99, 116, 105, 111, 110, 61, 34, 104, 116, 116, 112, 58, 47, 47, 60, 100, 105, 118, 32, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 110, 97, 109, 101, 61, 34, 113, 34, 60, 116, 97, 98, 108, 101, 32, 119, 105, 100, 116, 104, 61, 34, 49, 48, 48, 37, 34, 32, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 112, 111, 115, 105, 116, 105, 111, 110, 58, 34, 32, 98, 111, 114, 100, 101, 114, 61, 34, 48, 34, 32, 119, 105, 100, 116, 104, 61, 34, 114, 101, 108, 61, 34, 115, 104, 111, 114, 116, 99, 117, 116, 32, 105, 99, 111, 110, 34, 32, 104, 54, 62, 60, 117, 108, 62, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 32, 32, 60, 109, 101, 116, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 99, 115, 115, 34, 32, 109, 101, 100, 105, 97, 61, 34, 115, 99, 114, 101, 101, 110, 34, 32, 114, 101, 115, 112, 111, 110, 115, 105, 98, 108, 101, 32, 102, 111, 114, 32, 116, 104, 101, 32, 34, 32, 116, 121, 112, 101, 61, 34, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 34, 32, 115, 116, 121, 108, 101, 61, 34, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 34, 32, 97, 108, 108, 111, 119, 116, 114, 97, 110, 115, 112, 97, 114, 101, 110, 99, 121, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 13, 10, 60, 109, 101, 116, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 62, 60, 47, 115, 112, 97, 110, 62, 60, 115, 112, 97, 110, 32, 99, 108, 97, 115, 115, 61, 34, 48, 34, 32, 99, 101, 108, 108, 115, 112, 97, 99, 105, 110, 103, 61, 34, 48, 34, 62, 59, 10, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 115, 99, 114, 105, 112, 116, 32, 115, 111, 109, 101, 116, 105, 109, 101, 115, 32, 99, 97, 108, 108, 101, 100, 32, 116, 104, 101, 100, 111, 101, 115, 32, 110, 111, 116, 32, 110, 101, 99, 101, 115, 115, 97, 114, 105, 108, 121, 70, 111, 114, 32, 109, 111, 114, 101, 32, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 97, 116, 32, 116, 104, 101, 32, 98, 101, 103, 105, 110, 110, 105, 110, 103, 32, 111, 102, 32, 60, 33, 68, 79, 67, 84, 89, 80, 69, 32, 104, 116, 109, 108, 62, 60, 104, 116, 109, 108, 112, 97, 114, 116, 105, 99, 117, 108, 97, 114, 108, 121, 32, 105, 110, 32, 116, 104, 101, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 110, 97, 109, 101, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 58, 118, 111, 105, 100, 40, 48, 41, 59, 34, 101, 102, 102, 101, 99, 116, 105, 118, 101, 110, 101, 115, 115, 32, 111, 102, 32, 116, 104, 101, 32, 97, 117, 116, 111, 99, 111, 109, 112, 108, 101, 116, 101, 61, 34, 111, 102, 102, 34, 32, 103, 101, 110, 101, 114, 97, 108, 108, 121, 32, 99, 111, 110, 115, 105, 100, 101, 114, 
101, 100, 62, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 34, 62, 60, 47, 115, 99, 114, 105, 112, 116, 62, 13, 10, 60, 115, 99, 114, 105, 112, 116, 116, 104, 114, 111, 117, 103, 104, 111, 117, 116, 32, 116, 104, 101, 32, 119, 111, 114, 108, 100, 99, 111, 109, 109, 111, 110, 32, 109, 105, 115, 99, 111, 110, 99, 101, 112, 116, 105, 111, 110, 97, 115, 115, 111, 99, 105, 97, 116, 105, 111, 110, 32, 119, 105, 116, 104, 32, 116, 104, 101, 60, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 10, 60, 100, 105, 118, 32, 99, 100, 117, 114, 105, 110, 103, 32, 104, 105, 115, 32, 108, 105, 102, 101, 116, 105, 109, 101, 44, 99, 111, 114, 114, 101, 115, 112, 111, 110, 100, 105, 110, 103, 32, 116, 111, 32, 116, 104, 101, 116, 121, 112, 101, 61, 34, 105, 109, 97, 103, 101, 47, 120, 45, 105, 99, 111, 110, 34, 32, 97, 110, 32, 105, 110, 99, 114, 101, 97, 115, 105, 110, 103, 32, 110, 117, 109, 98, 101, 114, 100, 105, 112, 108, 111, 109, 97, 116, 105, 99, 32, 114, 101, 108, 97, 116, 105, 111, 110, 115, 97, 114, 101, 32, 111, 102, 116, 101, 110, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 109, 101, 116, 97, 32, 99, 104, 97, 114, 115, 101, 116, 61, 34, 117, 116, 102, 45, 56, 34, 32, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 101, 120, 97, 109, 112, 108, 101, 115, 32, 105, 110, 99, 108, 117, 100, 101, 32, 116, 104, 101, 34, 62, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 105, 112, 97, 114, 116, 105, 99, 105, 112, 97, 116, 105, 111, 110, 32, 105, 110, 32, 116, 104, 101, 116, 104, 101, 32, 101, 115, 116, 97, 98, 108, 105, 115, 104, 109, 101, 110, 116, 32, 111, 102, 10, 60, 47, 100, 105, 118, 62, 10, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 34, 38, 97, 109, 112, 59, 110, 98, 115, 112, 59, 38, 97, 109, 112, 59, 110, 98, 115, 112, 59, 116, 111, 32, 100, 101, 116, 101, 114, 109, 105, 110, 101, 32, 119, 104, 101, 116, 104, 101, 114, 113, 117, 105, 116, 101, 32, 100, 105, 102, 102, 101, 114, 101, 110, 116, 32, 102, 114, 111, 109, 109, 97, 114, 107, 101, 100, 32, 116, 104, 101, 32, 98, 101, 103, 105, 110, 110, 105, 110, 103, 100, 105, 115, 116, 97, 110, 99, 101, 32, 98, 101, 116, 119, 101, 101, 110, 32, 116, 104, 101, 99, 111, 110, 116, 114, 105, 98, 117, 116, 105, 111, 110, 115, 32, 116, 111, 32, 116, 104, 101, 99, 111, 110, 102, 108, 105, 99, 116, 32, 98, 101, 116, 119, 101, 101, 110, 32, 116, 104, 101, 119, 105, 100, 101, 108, 121, 32, 99, 111, 110, 115, 105, 100, 101, 114, 101, 100, 32, 116, 111, 119, 97, 115, 32, 111, 110, 101, 32, 111, 102, 32, 116, 104, 101, 32, 102, 105, 114, 115, 116, 119, 105, 116, 104, 32, 118, 97, 114, 121, 105, 110, 103, 32, 100, 101, 103, 114, 101, 101, 115, 104, 97, 118, 101, 32, 115, 112, 101, 99, 117, 108, 97, 116, 101, 100, 32, 116, 104, 97, 116, 40, 100, 111, 99, 117, 109, 101, 110, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 112, 97, 114, 116, 105, 99, 105, 112, 97, 116, 105, 110, 103, 32, 105, 110, 32, 116, 104, 101, 111, 114, 105, 103, 105, 110, 97, 108, 108, 121, 32, 100, 101, 118, 101, 108, 111, 112, 101, 100, 101, 116, 97, 32, 99, 104, 97, 114, 115, 101, 116, 61, 34, 117, 116, 102, 45, 56, 34, 62, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 47, 62, 10, 105, 110, 116, 101, 114, 99, 104, 97, 110, 103, 101, 97, 98, 108, 121, 32, 119, 105, 116, 104, 109, 111, 114, 101, 32, 99, 108, 111, 115, 101, 108, 121, 32, 114, 101, 108, 97, 116, 101, 100, 115, 111, 99, 105, 97, 108, 32, 97, 110, 100, 32, 
112, 111, 108, 105, 116, 105, 99, 97, 108, 116, 104, 97, 116, 32, 119, 111, 117, 108, 100, 32, 111, 116, 104, 101, 114, 119, 105, 115, 101, 112, 101, 114, 112, 101, 110, 100, 105, 99, 117, 108, 97, 114, 32, 116, 111, 32, 116, 104, 101, 115, 116, 121, 108, 101, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 116, 121, 112, 101, 61, 34, 115, 117, 98, 109, 105, 116, 34, 32, 110, 97, 109, 101, 61, 34, 102, 97, 109, 105, 108, 105, 101, 115, 32, 114, 101, 115, 105, 100, 105, 110, 103, 32, 105, 110, 100, 101, 118, 101, 108, 111, 112, 105, 110, 103, 32, 99, 111, 117, 110, 116, 114, 105, 101, 115, 99, 111, 109, 112, 117, 116, 101, 114, 32, 112, 114, 111, 103, 114, 97, 109, 109, 105, 110, 103, 101, 99, 111, 110, 111, 109, 105, 99, 32, 100, 101, 118, 101, 108, 111, 112, 109, 101, 110, 116, 100, 101, 116, 101, 114, 109, 105, 110, 97, 116, 105, 111, 110, 32, 111, 102, 32, 116, 104, 101, 102, 111, 114, 32, 109, 111, 114, 101, 32, 105, 110, 102, 111, 114, 109, 97, 116, 105, 111, 110, 111, 110, 32, 115, 101, 118, 101, 114, 97, 108, 32, 111, 99, 99, 97, 115, 105, 111, 110, 115, 112, 111, 114, 116, 117, 103, 117, 195, 170, 115, 32, 40, 69, 117, 114, 111, 112, 101, 117, 41, 208, 163, 208, 186, 209, 128, 208, 176, 209, 151, 208, 189, 209, 129, 209, 140, 208, 186, 208, 176, 209, 131, 208, 186, 209, 128, 208, 176, 209, 151, 208, 189, 209, 129, 209, 140, 208, 186, 208, 176, 208, 160, 208, 190, 209, 129, 209, 129, 208, 184, 208, 185, 209, 129, 208, 186, 208, 190, 208, 185, 208, 188, 208, 176, 209, 130, 208, 181, 209, 128, 208, 184, 208, 176, 208, 187, 208, 190, 208, 178, 208, 184, 208, 189, 209, 132, 208, 190, 209, 128, 208, 188, 208, 176, 209, 134, 208, 184, 208, 184, 209, 131, 208, 191, 209, 128, 208, 176, 208, 178, 208, 187, 208, 181, 208, 189, 208, 184, 209, 143, 208, 189, 208, 181, 208, 190, 208, 177, 209, 133, 208, 190, 208, 180, 208, 184, 208, 188, 208, 190, 208, 184, 208, 189, 209, 132, 208, 190, 209, 128, 208, 188, 208, 176, 209, 134, 208, 184, 209, 143, 208, 152, 208, 189, 209, 132, 208, 190, 209, 128, 208, 188, 208, 176, 209, 134, 208, 184, 209, 143, 208, 160, 208, 181, 209, 129, 208, 191, 209, 131, 208, 177, 208, 187, 208, 184, 208, 186, 208, 184, 208, 186, 208, 190, 208, 187, 208, 184, 209, 135, 208, 181, 209, 129, 209, 130, 208, 178, 208, 190, 208, 184, 208, 189, 209, 132, 208, 190, 209, 128, 208, 188, 208, 176, 209, 134, 208, 184, 209, 142, 209, 130, 208, 181, 209, 128, 209, 128, 208, 184, 209, 130, 208, 190, 209, 128, 208, 184, 208, 184, 208, 180, 208, 190, 209, 129, 209, 130, 208, 176, 209, 130, 208, 190, 209, 135, 208, 189, 208, 190, 216, 167, 217, 132, 217, 133, 216, 170, 217, 136, 216, 167, 216, 172, 216, 175, 217, 136, 217, 134, 216, 167, 217, 132, 216, 167, 216, 180, 216, 170, 216, 177, 216, 167, 217, 131, 216, 167, 216, 170, 216, 167, 217, 132, 216, 167, 217, 130, 216, 170, 216, 177, 216, 167, 216, 173, 216, 167, 216, 170, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 85, 84, 70, 45, 56, 34, 32, 115, 101, 116, 84, 105, 109, 101, 111, 117, 116, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 100, 105, 115, 112, 108, 97, 121, 58, 105, 110, 108, 105, 110, 101, 45, 98, 108, 111, 99, 107, 59, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 115, 117, 98, 109, 105, 116, 34, 32, 116, 121, 112, 101, 32, 61, 32, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 60, 105, 109, 103, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 34, 32, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 119, 51, 46, 
111, 114, 103, 47, 115, 104, 111, 114, 116, 99, 117, 116, 32, 105, 99, 111, 110, 34, 32, 104, 114, 101, 102, 61, 34, 34, 32, 97, 117, 116, 111, 99, 111, 109, 112, 108, 101, 116, 101, 61, 34, 111, 102, 102, 34, 32, 60, 47, 97, 62, 60, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 60, 47, 97, 62, 60, 47, 108, 105, 62, 10, 60, 108, 105, 32, 99, 108, 97, 115, 115, 61, 34, 99, 115, 115, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 60, 102, 111, 114, 109, 32, 97, 99, 116, 105, 111, 110, 61, 34, 104, 116, 116, 112, 58, 47, 47, 120, 116, 47, 99, 115, 115, 34, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 97, 108, 116, 101, 114, 110, 97, 116, 101, 34, 32, 13, 10, 60, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 32, 111, 110, 99, 108, 105, 99, 107, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 58, 40, 110, 101, 119, 32, 68, 97, 116, 101, 41, 46, 103, 101, 116, 84, 105, 109, 101, 40, 41, 125, 104, 101, 105, 103, 104, 116, 61, 34, 49, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 34, 32, 80, 101, 111, 112, 108, 101, 39, 115, 32, 82, 101, 112, 117, 98, 108, 105, 99, 32, 111, 102, 32, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 116, 101, 120, 116, 45, 100, 101, 99, 111, 114, 97, 116, 105, 111, 110, 58, 117, 110, 100, 101, 114, 116, 104, 101, 32, 98, 101, 103, 105, 110, 110, 105, 110, 103, 32, 111, 102, 32, 116, 104, 101, 32, 60, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 10, 60, 47, 100, 105, 118, 62, 10, 101, 115, 116, 97, 98, 108, 105, 115, 104, 109, 101, 110, 116, 32, 111, 102, 32, 116, 104, 101, 32, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 47, 100, 35, 118, 105, 101, 119, 112, 111, 114, 116, 123, 109, 105, 110, 45, 104, 101, 105, 103, 104, 116, 58, 10, 60, 115, 99, 114, 105, 112, 116, 32, 115, 114, 99, 61, 34, 104, 116, 116, 112, 58, 47, 47, 111, 112, 116, 105, 111, 110, 62, 60, 111, 112, 116, 105, 111, 110, 32, 118, 97, 108, 117, 101, 61, 111, 102, 116, 101, 110, 32, 114, 101, 102, 101, 114, 114, 101, 100, 32, 116, 111, 32, 97, 115, 32, 47, 111, 112, 116, 105, 111, 110, 62, 10, 60, 111, 112, 116, 105, 111, 110, 32, 118, 97, 108, 117, 60, 33, 68, 79, 67, 84, 89, 80, 69, 32, 104, 116, 109, 108, 62, 10, 60, 33, 45, 45, 91, 73, 110, 116, 101, 114, 110, 97, 116, 105, 111, 110, 97, 108, 32, 65, 105, 114, 112, 111, 114, 116, 62, 10, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 60, 47, 97, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 224, 184, 160, 224, 184, 178, 224, 184, 169, 224, 184, 178, 224, 185, 132, 224, 184, 151, 224, 184, 162, 225, 131, 165, 225, 131, 144, 225, 131, 160, 225, 131, 151, 225, 131, 163, 225, 131, 154, 225, 131, 152, 230, 173, 163, 233, 171, 148, 228, 184, 173, 230, 150, 135, 32, 40, 231, 185, 129, 233, 171, 148, 41, 224, 164, 168, 224, 164, 191, 224, 164, 176, 224, 165, 141, 224, 164, 166, 224, 165, 135, 224, 164, 182, 224, 164, 161, 224, 164, 190, 224, 164, 137, 224, 164, 168, 224, 164, 178, 224, 165, 139, 224, 164, 161, 224, 164, 149, 224, 165, 141, 224, 164, 183, 224, 165, 135, 224, 164, 164, 224, 165, 141, 224, 164, 176, 224, 164, 156, 224, 164, 190, 224, 164, 168, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 165, 128, 224, 164, 184, 224, 164, 130, 224, 164, 172, 224, 164, 130, 224, 164, 167, 224, 164, 191, 224, 164, 
164, 224, 164, 184, 224, 165, 141, 224, 164, 165, 224, 164, 190, 224, 164, 170, 224, 164, 168, 224, 164, 190, 224, 164, 184, 224, 165, 141, 224, 164, 181, 224, 165, 128, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 164, 184, 224, 164, 130, 224, 164, 184, 224, 165, 141, 224, 164, 149, 224, 164, 176, 224, 164, 163, 224, 164, 184, 224, 164, 190, 224, 164, 174, 224, 164, 151, 224, 165, 141, 224, 164, 176, 224, 165, 128, 224, 164, 154, 224, 164, 191, 224, 164, 159, 224, 165, 141, 224, 164, 160, 224, 165, 139, 224, 164, 130, 224, 164, 181, 224, 164, 191, 224, 164, 156, 224, 165, 141, 224, 164, 158, 224, 164, 190, 224, 164, 168, 224, 164, 133, 224, 164, 174, 224, 165, 135, 224, 164, 176, 224, 164, 191, 224, 164, 149, 224, 164, 190, 224, 164, 181, 224, 164, 191, 224, 164, 173, 224, 164, 191, 224, 164, 168, 224, 165, 141, 224, 164, 168, 224, 164, 151, 224, 164, 190, 224, 164, 161, 224, 164, 191, 224, 164, 175, 224, 164, 190, 224, 164, 129, 224, 164, 149, 224, 165, 141, 224, 164, 175, 224, 165, 139, 224, 164, 130, 224, 164, 149, 224, 164, 191, 224, 164, 184, 224, 165, 129, 224, 164, 176, 224, 164, 149, 224, 165, 141, 224, 164, 183, 224, 164, 190, 224, 164, 170, 224, 164, 185, 224, 165, 129, 224, 164, 129, 224, 164, 154, 224, 164, 164, 224, 165, 128, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 172, 224, 164, 130, 224, 164, 167, 224, 164, 168, 224, 164, 159, 224, 164, 191, 224, 164, 170, 224, 165, 141, 224, 164, 170, 224, 164, 163, 224, 165, 128, 224, 164, 149, 224, 165, 141, 224, 164, 176, 224, 164, 191, 224, 164, 149, 224, 165, 135, 224, 164, 159, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 190, 224, 164, 176, 224, 164, 130, 224, 164, 173, 224, 164, 170, 224, 165, 141, 224, 164, 176, 224, 164, 190, 224, 164, 170, 224, 165, 141, 224, 164, 164, 224, 164, 174, 224, 164, 190, 224, 164, 178, 224, 164, 191, 224, 164, 149, 224, 165, 139, 224, 164, 130, 224, 164, 176, 224, 164, 171, 224, 164, 188, 224, 165, 141, 224, 164, 164, 224, 164, 190, 224, 164, 176, 224, 164, 168, 224, 164, 191, 224, 164, 176, 224, 165, 141, 224, 164, 174, 224, 164, 190, 224, 164, 163, 224, 164, 178, 224, 164, 191, 224, 164, 174, 224, 164, 191, 224, 164, 159, 224, 165, 135, 224, 164, 161, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 100, 111, 99, 117, 109, 101, 110, 116, 46, 108, 111, 99, 97, 116, 105, 111, 110, 46, 112, 114, 111, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 115, 66, 121, 84, 97, 103, 78, 97, 109, 101, 40, 60, 33, 68, 79, 67, 84, 89, 80, 69, 32, 104, 116, 109, 108, 62, 10, 60, 104, 116, 109, 108, 32, 60, 109, 101, 116, 97, 32, 99, 104, 97, 114, 115, 101, 116, 61, 34, 117, 116, 102, 45, 56, 34, 62, 58, 117, 114, 108, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 104, 116, 116, 112, 58, 47, 47, 46, 99, 115, 115, 34, 32, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 115, 116, 121, 108, 101, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 62, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 104, 114, 101, 102, 61, 34, 119, 51, 46, 111, 114, 103, 47, 49, 57, 57, 57, 47, 120, 104, 116, 109, 108, 34, 32, 120, 109, 108, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 34, 32, 109, 101, 116, 104, 111, 100, 61, 34, 103, 101, 116, 34, 32, 97, 99, 116, 105, 111, 110, 61, 34, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 32, 61, 32, 100, 
111, 99, 117, 109, 101, 110, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 116, 121, 112, 101, 61, 34, 105, 109, 97, 103, 101, 47, 120, 45, 105, 99, 111, 110, 34, 32, 47, 62, 99, 101, 108, 108, 112, 97, 100, 100, 105, 110, 103, 61, 34, 48, 34, 32, 99, 101, 108, 108, 115, 112, 46, 99, 115, 115, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 60, 47, 97, 62, 60, 47, 108, 105, 62, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 34, 32, 104, 101, 105, 103, 104, 116, 61, 34, 49, 34, 34, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 110, 111, 110, 101, 59, 34, 62, 97, 108, 116, 101, 114, 110, 97, 116, 101, 34, 32, 116, 121, 112, 101, 61, 34, 97, 112, 112, 108, 105, 45, 47, 47, 87, 51, 67, 47, 47, 68, 84, 68, 32, 88, 72, 84, 77, 76, 32, 49, 46, 48, 32, 101, 108, 108, 115, 112, 97, 99, 105, 110, 103, 61, 34, 48, 34, 32, 99, 101, 108, 108, 112, 97, 100, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 118, 97, 108, 117, 101, 61, 34, 47, 97, 62, 38, 110, 98, 115, 112, 59, 60, 115, 112, 97, 110, 32, 114, 111, 108, 101, 61, 34, 115, 10, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 74, 97, 118, 97, 83, 99, 114, 105, 112, 116, 34, 32, 32, 100, 111, 99, 117, 109, 101, 110, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 115, 66, 103, 61, 34, 48, 34, 32, 99, 101, 108, 108, 115, 112, 97, 99, 105, 110, 103, 61, 34, 48, 34, 32, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 109, 101, 100, 105, 97, 61, 34, 116, 121, 112, 101, 61, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 39, 119, 105, 116, 104, 32, 116, 104, 101, 32, 101, 120, 99, 101, 112, 116, 105, 111, 110, 32, 111, 102, 32, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 32, 114, 101, 108, 61, 34, 115, 116, 32, 104, 101, 105, 103, 104, 116, 61, 34, 49, 34, 32, 119, 105, 100, 116, 104, 61, 34, 49, 34, 32, 61, 39, 43, 101, 110, 99, 111, 100, 101, 85, 82, 73, 67, 111, 109, 112, 111, 110, 101, 110, 116, 40, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 97, 108, 116, 101, 114, 110, 97, 116, 101, 34, 32, 10, 98, 111, 100, 121, 44, 32, 116, 114, 44, 32, 105, 110, 112, 117, 116, 44, 32, 116, 101, 120, 116, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 114, 111, 98, 111, 116, 115, 34, 32, 99, 111, 110, 109, 101, 116, 104, 111, 100, 61, 34, 112, 111, 115, 116, 34, 32, 97, 99, 116, 105, 111, 110, 61, 34, 62, 10, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 99, 115, 115, 34, 32, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 60, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 34, 62, 97, 114, 105, 97, 45, 104, 105, 100, 100, 101, 110, 61, 34, 116, 114, 117, 101, 34, 62, 194, 183, 60, 114, 105, 112, 116, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 108, 61, 48, 59, 125, 41, 40, 41, 59, 10, 40, 102, 117, 110, 99, 116, 105, 111, 110, 40, 41, 123, 98, 97, 99, 107, 103, 114, 111, 117, 110, 100, 45, 105, 109, 97, 103, 101, 58, 32, 117, 114, 108, 40, 47, 97, 62, 60, 47, 108, 105, 62, 60, 108, 
105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 9, 9, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 97, 116, 111, 114, 34, 32, 97, 114, 105, 97, 45, 104, 105, 100, 100, 101, 110, 61, 34, 116, 114, 117, 62, 32, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 108, 97, 110, 103, 117, 97, 103, 101, 61, 34, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 34, 32, 47, 111, 112, 116, 105, 111, 110, 62, 10, 60, 111, 112, 116, 105, 111, 110, 32, 118, 97, 108, 117, 101, 47, 100, 105, 118, 62, 60, 47, 100, 105, 118, 62, 60, 100, 105, 118, 32, 99, 108, 97, 115, 115, 61, 114, 97, 116, 111, 114, 34, 32, 97, 114, 105, 97, 45, 104, 105, 100, 100, 101, 110, 61, 34, 116, 114, 101, 61, 40, 110, 101, 119, 32, 68, 97, 116, 101, 41, 46, 103, 101, 116, 84, 105, 109, 101, 40, 41, 112, 111, 114, 116, 117, 103, 117, 195, 170, 115, 32, 40, 100, 111, 32, 66, 114, 97, 115, 105, 108, 41, 208, 190, 209, 128, 208, 179, 208, 176, 208, 189, 208, 184, 208, 183, 208, 176, 209, 134, 208, 184, 208, 184, 208, 178, 208, 190, 208, 183, 208, 188, 208, 190, 208, 182, 208, 189, 208, 190, 209, 129, 209, 130, 209, 140, 208, 190, 208, 177, 209, 128, 208, 176, 208, 183, 208, 190, 208, 178, 208, 176, 208, 189, 208, 184, 209, 143, 209, 128, 208, 181, 208, 179, 208, 184, 209, 129, 209, 130, 209, 128, 208, 176, 209, 134, 208, 184, 208, 184, 208, 178, 208, 190, 208, 183, 208, 188, 208, 190, 208, 182, 208, 189, 208, 190, 209, 129, 209, 130, 208, 184, 208, 190, 208, 177, 209, 143, 208, 183, 208, 176, 209, 130, 208, 181, 208, 187, 209, 140, 208, 189, 208, 176, 60, 33, 68, 79, 67, 84, 89, 80, 69, 32, 104, 116, 109, 108, 32, 80, 85, 66, 76, 73, 67, 32, 34, 110, 116, 45, 84, 121, 112, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 116, 101, 120, 116, 47, 60, 109, 101, 116, 97, 32, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 67, 111, 110, 116, 101, 114, 97, 110, 115, 105, 116, 105, 111, 110, 97, 108, 47, 47, 69, 78, 34, 32, 34, 104, 116, 116, 112, 58, 60, 104, 116, 109, 108, 32, 120, 109, 108, 110, 115, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 45, 47, 47, 87, 51, 67, 47, 47, 68, 84, 68, 32, 88, 72, 84, 77, 76, 32, 49, 46, 48, 32, 84, 68, 84, 68, 47, 120, 104, 116, 109, 108, 49, 45, 116, 114, 97, 110, 115, 105, 116, 105, 111, 110, 97, 108, 47, 47, 119, 119, 119, 46, 119, 51, 46, 111, 114, 103, 47, 84, 82, 47, 120, 104, 116, 109, 108, 49, 47, 112, 101, 32, 61, 32, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 39, 59, 60, 109, 101, 116, 97, 32, 110, 97, 109, 101, 61, 34, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 112, 97, 114, 101, 110, 116, 78, 111, 100, 101, 46, 105, 110, 115, 101, 114, 116, 66, 101, 102, 111, 114, 101, 60, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 110, 97, 106, 115, 34, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 40, 100, 111, 99, 117, 109, 101, 110, 116, 41, 46, 114, 101, 97, 100, 121, 40, 102, 117, 110, 99, 116, 105, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 105, 109, 97, 103, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 34, 104, 116, 116, 112, 58, 47, 47, 85, 65, 45, 67, 111, 109, 112, 97, 116, 105, 98, 108, 101, 34, 32, 99, 111, 110, 116, 101, 110, 116, 61, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 34, 32, 47, 62, 10, 108, 105, 110, 107, 32, 114, 101, 
108, 61, 34, 115, 104, 111, 114, 116, 99, 117, 116, 32, 105, 99, 111, 110, 60, 108, 105, 110, 107, 32, 114, 101, 108, 61, 34, 115, 116, 121, 108, 101, 115, 104, 101, 101, 116, 34, 32, 60, 47, 115, 99, 114, 105, 112, 116, 62, 10, 60, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 61, 32, 100, 111, 99, 117, 109, 101, 110, 116, 46, 99, 114, 101, 97, 116, 101, 69, 108, 101, 109, 101, 110, 60, 97, 32, 116, 97, 114, 103, 101, 116, 61, 34, 95, 98, 108, 97, 110, 107, 34, 32, 104, 114, 101, 102, 61, 32, 100, 111, 99, 117, 109, 101, 110, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 115, 66, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 110, 97, 109, 101, 61, 97, 46, 116, 121, 112, 101, 32, 61, 32, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 110, 97, 109, 101, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 34, 32, 47, 62, 100, 116, 100, 34, 62, 10, 60, 104, 116, 109, 108, 32, 120, 109, 108, 110, 115, 61, 34, 104, 116, 116, 112, 45, 47, 47, 87, 51, 67, 47, 47, 68, 84, 68, 32, 72, 84, 77, 76, 32, 52, 46, 48, 49, 32, 84, 101, 110, 116, 115, 66, 121, 84, 97, 103, 78, 97, 109, 101, 40, 39, 115, 99, 114, 105, 112, 116, 39, 41, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 104, 105, 100, 100, 101, 110, 34, 32, 110, 97, 109, 60, 115, 99, 114, 105, 112, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 34, 32, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 110, 111, 110, 101, 59, 34, 62, 100, 111, 99, 117, 109, 101, 110, 116, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 66, 121, 73, 100, 40, 61, 100, 111, 99, 117, 109, 101, 110, 116, 46, 99, 114, 101, 97, 116, 101, 69, 108, 101, 109, 101, 110, 116, 40, 39, 32, 116, 121, 112, 101, 61, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 39, 105, 110, 112, 117, 116, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 34, 32, 110, 97, 109, 101, 61, 34, 100, 46, 103, 101, 116, 69, 108, 101, 109, 101, 110, 116, 115, 66, 121, 84, 97, 103, 78, 97, 109, 101, 40, 115, 110, 105, 99, 97, 108, 34, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 67, 47, 47, 68, 84, 68, 32, 72, 84, 77, 76, 32, 52, 46, 48, 49, 32, 84, 114, 97, 110, 115, 105, 116, 60, 115, 116, 121, 108, 101, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 62, 10, 10, 60, 115, 116, 121, 108, 101, 32, 116, 121, 112, 101, 61, 34, 116, 101, 120, 116, 47, 99, 115, 115, 34, 62, 105, 111, 110, 97, 108, 46, 100, 116, 100, 34, 62, 10, 60, 104, 116, 109, 108, 32, 120, 109, 108, 110, 115, 61, 104, 116, 116, 112, 45, 101, 113, 117, 105, 118, 61, 34, 67, 111, 110, 116, 101, 110, 116, 45, 84, 121, 112, 101, 100, 105, 110, 103, 61, 34, 48, 34, 32, 99, 101, 108, 108, 115, 112, 97, 99, 105, 110, 103, 61, 34, 48, 34, 104, 116, 109, 108, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 34, 32, 47, 62, 10, 32, 115, 116, 121, 108, 101, 61, 34, 100, 105, 115, 112, 108, 97, 121, 58, 110, 111, 110, 101, 59, 34, 62, 60, 60, 108, 105, 62, 60, 97, 32, 104, 114, 101, 102, 61, 34, 104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 32, 116, 121, 112, 101, 61, 39, 116, 101, 120, 116, 47, 106, 97, 118, 97, 115, 99, 114, 105, 112, 116, 39, 62, 208, 180, 208, 181, 209, 143, 209, 130, 208, 181, 208, 187, 209, 140, 208, 189, 208, 190, 209, 129, 209, 130, 
208, 184, 209, 129, 208, 190, 208, 190, 209, 130, 208, 178, 208, 181, 209, 130, 209, 129, 209, 130, 208, 178, 208, 184, 208, 184, 208, 191, 209, 128, 208, 190, 208, 184, 208, 183, 208, 178, 208, 190, 208, 180, 209, 129, 209, 130, 208, 178, 208, 176, 208, 177, 208, 181, 208, 183, 208, 190, 208, 191, 208, 176, 209, 129, 208, 189, 208, 190, 209, 129, 209, 130, 208, 184, 224, 164, 170, 224, 165, 129, 224, 164, 184, 224, 165, 141, 224, 164, 164, 224, 164, 191, 224, 164, 149, 224, 164, 190, 224, 164, 149, 224, 164, 190, 224, 164, 130, 224, 164, 151, 224, 165, 141, 224, 164, 176, 224, 165, 135, 224, 164, 184, 224, 164, 137, 224, 164, 168, 224, 165, 141, 224, 164, 185, 224, 165, 139, 224, 164, 130, 224, 164, 168, 224, 165, 135, 224, 164, 181, 224, 164, 191, 224, 164, 167, 224, 164, 190, 224, 164, 168, 224, 164, 184, 224, 164, 173, 224, 164, 190, 224, 164, 171, 224, 164, 191, 224, 164, 149, 224, 165, 141, 224, 164, 184, 224, 164, 191, 224, 164, 130, 224, 164, 151, 224, 164, 184, 224, 165, 129, 224, 164, 176, 224, 164, 149, 224, 165, 141, 224, 164, 183, 224, 164, 191, 224, 164, 164, 224, 164, 149, 224, 165, 137, 224, 164, 170, 224, 165, 128, 224, 164, 176, 224, 164, 190, 224, 164, 135, 224, 164, 159, 224, 164, 181, 224, 164, 191, 224, 164, 156, 224, 165, 141, 224, 164, 158, 224, 164, 190, 224, 164, 170, 224, 164, 168, 224, 164, 149, 224, 164, 190, 224, 164, 176, 224, 165, 141, 224, 164, 176, 224, 164, 181, 224, 164, 190, 224, 164, 136, 224, 164, 184, 224, 164, 149, 224, 165, 141, 224, 164, 176, 224, 164, 191, 224, 164, 175, 224, 164, 164, 224, 164, 190}}; brotli_dictionary_s const* get_brotli_dictionary() { return &g_dictionary; } } // namespace io } // namespace cudf
0
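The record above ends with the accessor `get_brotli_dictionary()`, which simply returns a pointer to the statically initialized dictionary table whose raw bytes make up the bulk of the file. As a hedged illustration of how a decoder might consume it: the sketch below assumes the canonical RFC 7932 Brotli dictionary layout (`size_bits_by_length`, `offsets_by_length`, `data`), which is not shown in this excerpt; the struct and the `copy_dict_word` helper are hypothetical names introduced only for this example.

#include <cstdint>
#include <cstring>

// Hypothetical mirror of the assumed RFC 7932 dictionary layout.
struct brotli_dictionary_view {
  uint8_t size_bits_by_length[32];  // log2(word count) per word length
  uint32_t offsets_by_length[32];   // byte offset of the first word of each length
  uint8_t data[1];                  // concatenated dictionary words (flexible tail)
};

// Copies dictionary word `word_id` of length `len` bytes into `out`;
// returns false when the (len, word_id) pair is out of range.
inline bool copy_dict_word(brotli_dictionary_view const* dict,
                           int len,
                           uint32_t word_id,
                           uint8_t* out)
{
  if (len < 4 || len > 24) { return false; }  // valid Brotli word lengths
  if (word_id >= (1u << dict->size_bits_by_length[len])) { return false; }
  uint32_t const offset = dict->offsets_by_length[len] + static_cast<uint32_t>(len) * word_id;
  std::memcpy(out, dict->data + offset, len);
  return true;
}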
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/nested_json_gpu.cu
/*
 * Copyright (c) 2022-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nested_json.hpp"

#include <io/fst/logical_stack.cuh>
#include <io/fst/lookup_tables.cuh>
#include <io/utilities/parsing_utils.cuh>
#include <io/utilities/string_parsing.hpp>

#include <cudf/column/column_factories.hpp>
#include <cudf/detail/nvtx/ranges.hpp>
#include <cudf/detail/utilities/vector_factories.hpp>
#include <cudf/detail/utilities/visitor_overload.hpp>
#include <cudf/detail/valid_if.cuh>
#include <cudf/io/detail/tokenize_json.hpp>
#include <cudf/io/json.hpp>
#include <cudf/table/table.hpp>
#include <cudf/types.hpp>
#include <cudf/utilities/bit.hpp>
#include <cudf/utilities/error.hpp>
#include <cudf/utilities/span.hpp>

#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
#include <rmm/exec_policy.hpp>

#include <thrust/device_vector.h>
#include <thrust/iterator/discard_iterator.h>
#include <thrust/iterator/transform_iterator.h>
#include <thrust/iterator/zip_iterator.h>
#include <thrust/transform.h>
#include <thrust/tuple.h>

#include <limits>
#include <stack>

// Debug print flag
#ifndef NJP_DEBUG_PRINT
// #define NJP_DEBUG_PRINT
#endif

namespace {

/**
 * @brief While parsing the token stream, we use a stack of tree_nodes to maintain all the
 * information about the data path that is relevant.
 */
struct tree_node {
  // The column that this node is associated with
  cudf::io::json::json_column* column;

  // The row offset that this node belongs to within the given column
  uint32_t row_index;

  // Selected child column
  // E.g., if this is a struct node, and we subsequently encounter the field name "a", then this
  // points to the struct's "a" child column
  cudf::io::json::json_column* current_selected_col = nullptr;

  std::size_t num_children = 0;
};

/**
 * @brief Verifies that the JSON input can be handled without corrupted data due to offset
 * overflows.
 *
 * @param input_size The JSON input's size in bytes
 */
void check_input_size(std::size_t input_size)
{
  // Transduce() writes symbol offsets that may be as large as input_size-1
  CUDF_EXPECTS(input_size == 0 ||
                 (input_size - 1) <= std::numeric_limits<cudf::io::json::SymbolOffsetT>::max(),
               "Given JSON input is too large");
}
}  // namespace

namespace cudf::io::json {

// FST to help fix the stack context of characters that follow the first record on each JSON line
namespace fix_stack_of_excess_chars {

// Type used to represent the target state in the transition table
using StateT = char;

// Type used to represent a symbol group id
using SymbolGroupT = uint8_t;

/**
 * @brief Definition of the DFA's states.
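 *
 * Worked example (an editor's illustration, not from the original source): for the
 * JSON lines input `{"a":1} xyz\n`, the opening brace is read at root stack context
 * and keeps the machine in BEFORE; the characters inside the record, read at struct
 * stack context, drive it to WITHIN; the trailing ` xyz`, read at root stack context
 * again, moves it to EXCESS; and the newline returns it to BEFORE for the next line.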
 */
enum class dfa_states : StateT {
  // Before the first record on the JSON line
  BEFORE,
  // Within the first record on the JSON line
  WITHIN,
  // Excess data that follows the first record on the JSON line
  EXCESS,
  // Total number of states
  NUM_STATES
};

/**
 * @brief Definition of the symbol groups
 */
enum class dfa_symbol_group_id : SymbolGroupT {
  ROOT,              ///< Symbol for root stack context
  DELIMITER,         ///< Line delimiter symbol group
  OTHER,             ///< Symbol group that implicitly matches all other tokens
  NUM_SYMBOL_GROUPS  ///< Total number of symbol groups
};

constexpr auto TT_NUM_STATES     = static_cast<StateT>(dfa_states::NUM_STATES);
constexpr auto NUM_SYMBOL_GROUPS = static_cast<uint32_t>(dfa_symbol_group_id::NUM_SYMBOL_GROUPS);

/**
 * @brief Function object to map (input_symbol,stack_context) tuples to a symbol group.
 */
struct SymbolPairToSymbolGroupId {
  CUDF_HOST_DEVICE SymbolGroupT operator()(thrust::tuple<SymbolT, StackSymbolT> symbol) const
  {
    auto const input_symbol = thrust::get<0>(symbol);
    auto const stack_symbol = thrust::get<1>(symbol);
    return static_cast<SymbolGroupT>(input_symbol == '\n'
                                       ? dfa_symbol_group_id::DELIMITER
                                       : (stack_symbol == '_' ? dfa_symbol_group_id::ROOT
                                                              : dfa_symbol_group_id::OTHER));
  }
};

/**
 * @brief Translation function object that fixes the stack context of excess data that follows
 * the first JSON record on each line.
 */
struct TransduceInputOp {
  template <typename RelativeOffsetT, typename SymbolT>
  constexpr CUDF_HOST_DEVICE StackSymbolT operator()(StateT const state_id,
                                                     SymbolGroupT const match_id,
                                                     RelativeOffsetT const relative_offset,
                                                     SymbolT const read_symbol) const
  {
    if (state_id == static_cast<StateT>(dfa_states::EXCESS)) { return '_'; }
    return thrust::get<1>(read_symbol);
  }

  template <typename SymbolT>
  constexpr CUDF_HOST_DEVICE int32_t operator()(StateT const state_id,
                                                SymbolGroupT const match_id,
                                                SymbolT const read_symbol) const
  {
    constexpr int32_t single_output_item = 1;
    return single_output_item;
  }
};

// Aliases for readability of the transition table
constexpr auto TT_BEFORE = dfa_states::BEFORE;
constexpr auto TT_INSIDE = dfa_states::WITHIN;
constexpr auto TT_EXCESS = dfa_states::EXCESS;

// Transition table
std::array<std::array<dfa_states, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> constexpr transition_table{
  {/* IN_STATE       ROOT       DELIMITER  OTHER */
   /* TT_BEFORE */ {{TT_BEFORE, TT_BEFORE, TT_INSIDE}},
   /* TT_INSIDE */ {{TT_EXCESS, TT_BEFORE, TT_INSIDE}},
   /* TT_EXCESS */ {{TT_EXCESS, TT_BEFORE, TT_EXCESS}}}};

// The DFA's starting state
constexpr auto start_state = static_cast<StateT>(dfa_states::BEFORE);
}  // namespace fix_stack_of_excess_chars

// FST to prune tokens of invalid lines for recovering JSON lines format
namespace token_filter {

// Type used to represent the target state in the transition table
using StateT = char;

// Type used to represent a symbol group id
using SymbolGroupT = uint8_t;

/**
 * @brief Definition of the DFA's states
 */
enum class dfa_states : StateT { VALID, INVALID, NUM_STATES };

// Aliases for readability of the transition table
constexpr auto TT_INV = dfa_states::INVALID;
constexpr auto TT_VLD = dfa_states::VALID;

/**
 * @brief Definition of the symbol groups
 */
enum class dfa_symbol_group_id : SymbolGroupT {
  ERROR,             ///< Error token symbol group
  DELIMITER,         ///< Record / line delimiter symbol group
  OTHER_SYMBOLS,     ///< Symbol group that implicitly matches all other tokens
  NUM_SYMBOL_GROUPS  ///< Total number of symbol groups
};

constexpr auto TT_NUM_STATES = static_cast<StateT>(dfa_states::NUM_STATES);
constexpr auto NUM_SYMBOL_GROUPS =
static_cast<uint32_t>(dfa_symbol_group_id::NUM_SYMBOL_GROUPS); // Lookup table to map an input symbol (i.e., a token) to a symbol group std::array<std::vector<PdaTokenT>, NUM_SYMBOL_GROUPS - 1> const symbol_groups{{ {static_cast<PdaTokenT>(token_t::ErrorBegin)}, // Symbols mapping to ERROR {static_cast<PdaTokenT>(token_t::LineEnd)} // Symbols mapping to DELIMITER }}; /** * @brief Function object to map (token,token_index) tuples to a symbol group. */ struct UnwrapTokenFromSymbolOp { template <typename SymbolGroupLookupTableT> CUDF_HOST_DEVICE SymbolGroupT operator()(SymbolGroupLookupTableT const& sgid_lut, thrust::tuple<PdaTokenT, SymbolOffsetT> symbol) const { PdaTokenT const token_type = thrust::get<0>(symbol); return sgid_lut.lookup(token_type); } }; /** * @brief Translation function object that discards line delimiter tokens and tokens belonging to * invalid lines. */ struct TransduceToken { template <typename RelativeOffsetT, typename SymbolT> constexpr CUDF_HOST_DEVICE SymbolT operator()(StateT const state_id, SymbolGroupT const match_id, RelativeOffsetT const relative_offset, SymbolT const read_symbol) const { const bool is_end_of_invalid_line = (state_id == static_cast<StateT>(TT_INV) && match_id == static_cast<SymbolGroupT>(dfa_symbol_group_id::DELIMITER)); if (is_end_of_invalid_line) { return relative_offset == 0 ? SymbolT{token_t::StructEnd, 0} : SymbolT{token_t::StructBegin, 0}; } else { return read_symbol; } } template <typename SymbolT> constexpr CUDF_HOST_DEVICE int32_t operator()(StateT const state_id, SymbolGroupT const match_id, SymbolT const read_symbol) const { // Number of tokens emitted on invalid lines constexpr int32_t num_inv_tokens = 2; const bool is_delimiter = match_id == static_cast<SymbolGroupT>(dfa_symbol_group_id::DELIMITER); // If state is either invalid or we're entering an invalid state, we discard tokens const bool is_part_of_invalid_line = (match_id != static_cast<SymbolGroupT>(dfa_symbol_group_id::ERROR) && state_id == static_cast<StateT>(TT_VLD)); // Indicates whether we transition from an invalid line to a potentially valid line const bool is_end_of_invalid_line = (state_id == static_cast<StateT>(TT_INV) && is_delimiter); int32_t const emit_count = is_end_of_invalid_line ? num_inv_tokens : (is_part_of_invalid_line && !is_delimiter ? 1 : 0); return emit_count; } }; // Transition table std::array<std::array<dfa_states, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> const transition_table{ {/* IN_STATE ERROR DELIM OTHER */ /* VALID */ {{TT_INV, TT_VLD, TT_VLD}}, /* INVALID */ {{TT_INV, TT_VLD, TT_INV}}}}; // The DFA's starting state constexpr auto start_state = static_cast<StateT>(TT_VLD); } // namespace token_filter // JSON to stack operator DFA (Deterministic Finite Automata) namespace to_stack_op { // Type used to represent the target state in the transition table using StateT = char; /** * @brief Definition of the DFA's states */ enum class dfa_states : StateT { // The active state while outside of a string. When encountering an opening bracket or curly // brace, we push it onto the stack. When encountering a closing bracket or brace, we pop from the // stack. TT_OOS = 0U, // The active state while within a string (e.g., field name or a string value). We do not push or // pop from the stack while in this state. TT_STR, // The active state after encountering an escape symbol (e.g., '\'), while in the TT_STR state. 
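  // (In the default transition table below, every symbol read in this state transitions back to
  //  TT_STR, so an escaped quote \" cannot terminate the string.)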
TT_ESC, // Total number of states TT_NUM_STATES }; // Aliases for readability of the transition table constexpr auto TT_OOS = dfa_states::TT_OOS; constexpr auto TT_STR = dfa_states::TT_STR; constexpr auto TT_ESC = dfa_states::TT_ESC; /** * @brief Definition of the symbol groups */ enum class dfa_symbol_group_id : uint8_t { OPENING_BRACE, ///< Opening brace SG: { OPENING_BRACKET, ///< Opening bracket SG: [ CLOSING_BRACE, ///< Closing brace SG: } CLOSING_BRACKET, ///< Closing bracket SG: ] QUOTE_CHAR, ///< Quote character SG: " ESCAPE_CHAR, ///< Escape character SG: '\' NEWLINE_CHAR, ///< Newline character SG: '\n' OTHER_SYMBOLS, ///< SG implicitly matching all other characters NUM_SYMBOL_GROUPS ///< Total number of symbol groups }; constexpr auto TT_NUM_STATES = static_cast<StateT>(dfa_states::TT_NUM_STATES); constexpr auto NUM_SYMBOL_GROUPS = static_cast<uint32_t>(dfa_symbol_group_id::NUM_SYMBOL_GROUPS); // The i-th string representing all the characters of a symbol group std::array<std::string, NUM_SYMBOL_GROUPS - 1> const symbol_groups{ {{"{"}, {"["}, {"}"}, {"]"}, {"\""}, {"\\"}, {"\n"}}}; // Transition table for the default JSON and JSON lines formats std::array<std::array<dfa_states, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> const transition_table{ {/* IN_STATE { [ } ] " \ \n OTHER */ /* TT_OOS */ {{TT_OOS, TT_OOS, TT_OOS, TT_OOS, TT_STR, TT_OOS, TT_OOS, TT_OOS}}, /* TT_STR */ {{TT_STR, TT_STR, TT_STR, TT_STR, TT_OOS, TT_ESC, TT_STR, TT_STR}}, /* TT_ESC */ {{TT_STR, TT_STR, TT_STR, TT_STR, TT_STR, TT_STR, TT_STR, TT_STR}}}}; // Transition table for the JSON lines format that recovers from invalid JSON lines std::array<std::array<dfa_states, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> const resetting_transition_table{ {/* IN_STATE { [ } ] " \ \n OTHER */ /* TT_OOS */ {{TT_OOS, TT_OOS, TT_OOS, TT_OOS, TT_STR, TT_OOS, TT_OOS, TT_OOS}}, /* TT_STR */ {{TT_STR, TT_STR, TT_STR, TT_STR, TT_OOS, TT_ESC, TT_OOS, TT_STR}}, /* TT_ESC */ {{TT_STR, TT_STR, TT_STR, TT_STR, TT_STR, TT_STR, TT_OOS, TT_STR}}}}; // Translation table for the default JSON and JSON lines formats std::array<std::array<std::vector<char>, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> const translation_table{ {/* IN_STATE { [ } ] " \ \n OTHER */ /* TT_OOS */ {{{'{'}, {'['}, {'}'}, {']'}, {}, {}, {}, {}}}, /* TT_STR */ {{{}, {}, {}, {}, {}, {}, {}, {}}}, /* TT_ESC */ {{{}, {}, {}, {}, {}, {}, {}, {}}}}}; // Translation table for the JSON lines format that recovers from invalid JSON lines std::array<std::array<std::vector<char>, NUM_SYMBOL_GROUPS>, TT_NUM_STATES> const resetting_translation_table{ {/* IN_STATE { [ } ] " \ \n OTHER */ /* TT_OOS */ {{{'{'}, {'['}, {'}'}, {']'}, {}, {}, {'\n'}, {}}}, /* TT_STR */ {{{}, {}, {}, {}, {}, {}, {'\n'}, {}}}, /* TT_ESC */ {{{}, {}, {}, {}, {}, {}, {'\n'}, {}}}}}; // The DFA's starting state constexpr auto start_state = static_cast<StateT>(TT_OOS); } // namespace to_stack_op // JSON tokenizer pushdown automaton namespace tokenizer_pda { // Type used to represent the target state in the transition table using StateT = char; /** * @brief Symbol groups for the input alphabet for the pushdown automaton */ enum class symbol_group_id : PdaSymbolGroupIdT { /// Opening brace OPENING_BRACE, /// Opening bracket OPENING_BRACKET, /// Closing brace CLOSING_BRACE, /// Closing bracket CLOSING_BRACKET, /// Quote QUOTE, /// Escape ESCAPE, /// Comma COMMA, /// Colon COLON, /// Whitespace WHITE_SPACE, /// Linebreak LINE_BREAK, /// Other (any input symbol not assigned to one of the above symbol groups) OTHER, /// Total number of symbol 
groups amongst which to differentiate NUM_PDA_INPUT_SGS }; /** * @brief Symbols in the stack alphabet */ enum class stack_symbol_group_id : PdaStackSymbolGroupIdT { /// Symbol representing that we're at the JSON root (nesting level 0) STACK_ROOT, /// Symbol representing that we're currently within a list object STACK_LIST, /// Symbol representing that we're currently within a struct object STACK_STRUCT, /// Total number of symbols in the stack alphabet NUM_STACK_SGS }; constexpr auto NUM_PDA_INPUT_SGS = static_cast<PdaSymbolGroupIdT>(symbol_group_id::NUM_PDA_INPUT_SGS); constexpr auto NUM_STACK_SGS = static_cast<PdaStackSymbolGroupIdT>(stack_symbol_group_id::NUM_STACK_SGS); /// Total number of symbol groups to differentiate amongst (stack alphabet * input alphabet) constexpr PdaSymbolGroupIdT NUM_PDA_SGIDS = NUM_PDA_INPUT_SGS * NUM_STACK_SGS; /// Mapping a input symbol to the symbol group id static __constant__ PdaSymbolGroupIdT tos_sg_to_pda_sgid[] = { static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::WHITE_SPACE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::LINE_BREAK), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::WHITE_SPACE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::WHITE_SPACE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::QUOTE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::COMMA), 
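  // (',' (ASCII 0x2C) just above maps to the COMMA group; ':' (0x3A) further below maps to COLON)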
static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::COLON), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OPENING_BRACKET), static_cast<PdaSymbolGroupIdT>(symbol_group_id::ESCAPE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::CLOSING_BRACKET), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), 
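  // ('[' 0x5B, '\\' 0x5C and ']' 0x5D were mapped just above; '{' 0x7B and '}' 0x7D follow near
  //  the end of the table)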
static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OPENING_BRACE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER), static_cast<PdaSymbolGroupIdT>(symbol_group_id::CLOSING_BRACE), static_cast<PdaSymbolGroupIdT>(symbol_group_id::OTHER)}; /** * @brief Maps a (top-of-stack symbol, input symbol)-pair to a symbol group id of the deterministic * visibly pushdown automaton (DVPA) */ struct PdaSymbolToSymbolGroupId { template <typename SymbolT, typename StackSymbolT> __device__ __forceinline__ PdaSymbolGroupIdT operator()(thrust::tuple<SymbolT, StackSymbolT> symbol_pair) const { // The symbol read from the input auto symbol = thrust::get<0>(symbol_pair); // The stack symbol (i.e., what is on top of the stack at the time the input symbol was read) // I.e., whether we're reading in something within a struct, a list, or the JSON root auto stack_symbol = thrust::get<1>(symbol_pair); // The stack symbol offset: '_' is the root group (0), '[' is the list group (1), '{' is the // struct group (2) int32_t stack_idx = static_cast<PdaStackSymbolGroupIdT>( (stack_symbol == '_') ? stack_symbol_group_id::STACK_ROOT : ((stack_symbol == '[') ? 
stack_symbol_group_id::STACK_LIST
                               : stack_symbol_group_id::STACK_STRUCT));

    // The relative symbol group id of the current input symbol
    constexpr auto pda_sgid_lookup_size =
      static_cast<int32_t>(sizeof(tos_sg_to_pda_sgid) / sizeof(tos_sg_to_pda_sgid[0]));
    PdaSymbolGroupIdT symbol_gid =
      tos_sg_to_pda_sgid[min(static_cast<int32_t>(symbol), pda_sgid_lookup_size - 1)];
    return stack_idx * static_cast<PdaSymbolGroupIdT>(symbol_group_id::NUM_PDA_INPUT_SGS) +
           symbol_gid;
  }
};

// The states defined by the pushdown automaton
enum class pda_state_t : StateT {
  // Beginning of value
  PD_BOV,
  // Beginning of array
  PD_BOA,
  // Literal or number
  PD_LON,
  // String
  PD_STR,
  // After escape char when within string
  PD_SCE,
  // After having parsed a value
  PD_PVL,
  // Before the next field name
  PD_BFN,
  // Field name
  PD_FLN,
  // After escape char when within field name
  PD_FNE,
  // After a field name inside a struct
  PD_PFN,
  // Error state (trap state)
  PD_ERR,
  // Total number of PDA states
  PD_NUM_STATES
};

enum class json_format_cfg_t {
  // Format describing regular JSON
  JSON,

  // Format describing permissive newline-delimited JSON
  // I.e., newline characters are treated as delimiters only at the root stack level
  // E.g., this is treated as a single record:
  // {"a":
  // 123}
  JSON_LINES,

  // Format describing strict newline-delimited JSON
  // I.e., all newlines delimit a record, independent of the context they appear in
  JSON_LINES_STRICT,

  // Transition table for parsing newline-delimited JSON that recovers from invalid JSON lines
  // This format also follows `JSON_LINES_STRICT` behaviour
  JSON_LINES_RECOVER
};

// Aliases for readability of the transition table
constexpr auto PD_BOV = pda_state_t::PD_BOV;
constexpr auto PD_BOA = pda_state_t::PD_BOA;
constexpr auto PD_LON = pda_state_t::PD_LON;
constexpr auto PD_STR = pda_state_t::PD_STR;
constexpr auto PD_SCE = pda_state_t::PD_SCE;
constexpr auto PD_PVL = pda_state_t::PD_PVL;
constexpr auto PD_BFN = pda_state_t::PD_BFN;
constexpr auto PD_FLN = pda_state_t::PD_FLN;
constexpr auto PD_FNE = pda_state_t::PD_FNE;
constexpr auto PD_PFN = pda_state_t::PD_PFN;
constexpr auto PD_ERR = pda_state_t::PD_ERR;

constexpr auto PD_NUM_STATES = static_cast<StateT>(pda_state_t::PD_NUM_STATES);

// The starting state of the pushdown automaton
constexpr auto start_state = static_cast<StateT>(pda_state_t::PD_BOV);

/**
 * @brief Returns the transition table for the given format
 */
auto get_transition_table(json_format_cfg_t format)
{
  static_assert(static_cast<PdaStackSymbolGroupIdT>(stack_symbol_group_id::STACK_ROOT) == 0);
  static_assert(static_cast<PdaStackSymbolGroupIdT>(stack_symbol_group_id::STACK_LIST) == 1);
  static_assert(static_cast<PdaStackSymbolGroupIdT>(stack_symbol_group_id::STACK_STRUCT) == 2);

  std::array<std::array<pda_state_t, NUM_PDA_SGIDS>, PD_NUM_STATES> pda_tt;
  if (format == json_format_cfg_t::JSON || format == json_format_cfg_t::JSON_LINES) {
    // In case of newline-delimited JSON, multiple newlines are ignored, similar to whitespace.
    // That is, empty lines are ignored
    // PD_ANL describes the target state after a new line on an empty stack (JSON root level)
    auto const PD_ANL = (format == json_format_cfg_t::JSON) ?
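    // (for JSON, a newline after a root-level value is mere whitespace, so the automaton stays
    //  in PD_PVL; for JSON_LINES it delimits the record and the next value begins)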
PD_PVL : PD_BOV; // First row: empty stack ("root" level of the JSON) // Second row: '[' on top of stack (we're parsing a list value) // Third row: '{' on top of stack (we're parsing a struct value) // { [ } ] " \ , : space newline other pda_tt[static_cast<StateT>(pda_state_t::PD_BOV)] = { PD_BOA, PD_BOA, PD_ERR, PD_ERR, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON, PD_BOA, PD_BOA, PD_ERR, PD_PVL, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON, PD_BOA, PD_BOA, PD_ERR, PD_ERR, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON}; pda_tt[static_cast<StateT>(pda_state_t::PD_BOA)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOA, PD_BOA, PD_ERR, PD_PVL, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOA, PD_BOA, PD_LON, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_FLN, PD_ERR, PD_ERR, PD_ERR, PD_BOA, PD_BOA, PD_ERR}; pda_tt[static_cast<StateT>(pda_state_t::PD_LON)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_PVL, PD_LON, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_PVL, PD_PVL, PD_LON, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_ERR, PD_PVL, PD_PVL, PD_LON}; pda_tt[static_cast<StateT>(pda_state_t::PD_STR)] = { PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR}; pda_tt[static_cast<StateT>(pda_state_t::PD_SCE)] = { PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR}; pda_tt[static_cast<StateT>(pda_state_t::PD_PVL)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ANL, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_PVL, PD_PVL, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_ERR, PD_PVL, PD_PVL, PD_ERR}; pda_tt[static_cast<StateT>(pda_state_t::PD_BFN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_FLN, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_BFN, PD_ERR}; pda_tt[static_cast<StateT>(pda_state_t::PD_FLN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_PFN, PD_FNE, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN}; pda_tt[static_cast<StateT>(pda_state_t::PD_FNE)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN}; pda_tt[static_cast<StateT>(pda_state_t::PD_PFN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_PFN, PD_PFN, PD_ERR}; pda_tt[static_cast<StateT>(pda_state_t::PD_ERR)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, 
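      // (trap state: in these non-recovering formats, no input symbol ever leaves PD_ERR)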
PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR};
} // Transition table for strict JSON lines (including recovery) // Newlines are treated as record delimiters else { // In case of newline-delimited JSON, multiple newlines are ignored, similar to whitespace. // That is, empty lines are ignored // PD_ANL describes the target state after a newline after encountering the error state auto const PD_ANL = (format == json_format_cfg_t::JSON_LINES_RECOVER) ? PD_BOV : PD_ERR;
// Target state after having parsed the first JSON value on a JSON line // Spark has the special need to ignore everything that comes after the first JSON object // on a JSON line instead of marking those as invalid auto const PD_AFS = (format == json_format_cfg_t::JSON_LINES_RECOVER) ? PD_PVL : PD_ERR;
// First row: empty stack ("root" level of the JSON) // Second row: '[' on top of stack (we're parsing a list value) // Third row: '{' on top of stack (we're parsing a struct value) // { [ } ] " \ , : space newline other
pda_tt[static_cast<StateT>(pda_state_t::PD_BOV)] = { PD_BOA, PD_BOA, PD_ERR, PD_ERR, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON, PD_BOA, PD_BOA, PD_ERR, PD_PVL, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON, PD_BOA, PD_BOA, PD_ERR, PD_ERR, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_BOV, PD_LON};
pda_tt[static_cast<StateT>(pda_state_t::PD_BOA)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_BOA, PD_BOA, PD_ERR, PD_PVL, PD_STR, PD_ERR, PD_ERR, PD_ERR, PD_BOA, PD_BOV, PD_LON, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_FLN, PD_ERR, PD_ERR, PD_ERR, PD_BOA, PD_BOV, PD_ERR};
pda_tt[static_cast<StateT>(pda_state_t::PD_LON)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_BOV, PD_LON, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_PVL, PD_BOV, PD_LON, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_ERR, PD_PVL, PD_BOV, PD_LON};
pda_tt[static_cast<StateT>(pda_state_t::PD_STR)] = { PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_PVL, PD_SCE, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR};
pda_tt[static_cast<StateT>(pda_state_t::PD_SCE)] = { PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_STR, PD_BOV, PD_STR};
pda_tt[static_cast<StateT>(pda_state_t::PD_PVL)] = { PD_AFS, PD_AFS, PD_AFS, PD_AFS, PD_AFS, PD_AFS, PD_AFS, PD_AFS, PD_PVL, PD_BOV, PD_AFS, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_PVL, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_ERR, PD_PVL, PD_BOV, PD_ERR};
pda_tt[static_cast<StateT>(pda_state_t::PD_BFN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_PVL, PD_ERR, PD_FLN, PD_ERR, PD_ERR, PD_ERR, PD_BFN, PD_BOV, PD_ERR};
pda_tt[static_cast<StateT>(pda_state_t::PD_FLN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR,
PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_PFN, PD_FNE, PD_FLN, PD_FLN, PD_FLN, PD_BOV, PD_FLN};
pda_tt[static_cast<StateT>(pda_state_t::PD_FNE)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_FLN, PD_BOV, PD_FLN};
pda_tt[static_cast<StateT>(pda_state_t::PD_PFN)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_BOV, PD_PFN, PD_BOV, PD_ERR};
pda_tt[static_cast<StateT>(pda_state_t::PD_ERR)] = { PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ANL, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ANL, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ERR, PD_ANL, PD_ERR}; } return pda_tt; }
/** * @brief Returns the translation table * @param recover_from_error Whether or not the tokenizer should recover from invalid lines. If * `recover_from_error` is true, invalid JSON lines end with the token sequence (`ErrorBegin`, * `LineEnd`) and incomplete JSON lines (e.g., `{"a":123\n`) are treated as invalid lines. */ auto get_translation_table(bool recover_from_error) { constexpr auto StructBegin = token_t::StructBegin; constexpr auto StructEnd = token_t::StructEnd; constexpr auto ListBegin = token_t::ListBegin; constexpr auto ListEnd = token_t::ListEnd; constexpr auto StructMemberBegin = token_t::StructMemberBegin; constexpr auto StructMemberEnd = token_t::StructMemberEnd; constexpr auto FieldNameBegin = token_t::FieldNameBegin; constexpr auto FieldNameEnd = token_t::FieldNameEnd; constexpr auto StringBegin = token_t::StringBegin; constexpr auto StringEnd = token_t::StringEnd; constexpr auto ValueBegin = token_t::ValueBegin; constexpr auto ValueEnd = token_t::ValueEnd; constexpr auto ErrorBegin = token_t::ErrorBegin;
/** * @brief Instead of specifying the verbose translation tables twice (i.e., once when * `recover_from_error` is true and once when it is false), we use `nl_tokens` to specialize the * translation table where it differs depending on the `recover_from_error` option. If and only if * `recover_from_error` is true, `recovering_tokens` are returned along with a token_t::LineEnd * token, otherwise `regular_tokens` is returned. */ auto nl_tokens = [recover_from_error](std::vector<char> regular_tokens, std::vector<char> recovering_tokens) { if (recover_from_error) { recovering_tokens.push_back(token_t::LineEnd); return recovering_tokens; } return regular_tokens; };
/** * @brief Helper function that returns `recovering_tokens` if `recover_from_error` is true and * returns `regular_tokens` otherwise. This is used to ignore excess characters after the first * value in the case of JSON lines that recover from invalid lines, as Spark ignores any excess * characters that follow the first record on a JSON line.
*/ auto alt_tokens = [recover_from_error](std::vector<char> regular_tokens, std::vector<char> recovering_tokens) { if (recover_from_error) { return recovering_tokens; } return regular_tokens; }; std::array<std::array<std::vector<char>, NUM_PDA_SGIDS>, PD_NUM_STATES> pda_tlt; pda_tlt[static_cast<StateT>(pda_state_t::PD_BOV)] = {{ /*ROOT*/ {StructBegin}, // OPENING_BRACE {ListBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {StringBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {}), // LINE_BREAK {ValueBegin}, // OTHER /*LIST*/ {StructBegin}, // OPENING_BRACE {ListBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ListEnd}, // CLOSING_BRACKET {StringBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ValueBegin}, // OTHER /*STRUCT*/ {StructBegin}, // OPENING_BRACE {ListBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {StringBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ValueBegin}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_BOA)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*LIST*/ {StructBegin}, // OPENING_BRACE {ListBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ListEnd}, // CLOSING_BRACKET {StringBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ValueBegin}, // OTHER /*STRUCT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {StructEnd}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {StructMemberBegin, FieldNameBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_LON)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ValueEnd}, // WHITE_SPACE nl_tokens({ValueEnd}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ValueEnd, ListEnd}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ValueEnd}, // COMMA {ErrorBegin}, // COLON {ValueEnd}, // WHITE_SPACE nl_tokens({ValueEnd}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*STRUCT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ValueEnd, StructMemberEnd, StructEnd}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ValueEnd, StructMemberEnd}, // COMMA {ErrorBegin}, // COLON {ValueEnd}, // WHITE_SPACE nl_tokens({ValueEnd}, {ErrorBegin}), // LINE_BREAK {}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_STR)] = {{ /*ROOT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {StringEnd}, // 
QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*LIST*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {StringEnd}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*STRUCT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {StringEnd}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_SCE)] = {{ /*ROOT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*LIST*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}, // OTHER /*STRUCT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_PVL)] = { { /*ROOT*/ {alt_tokens({ErrorBegin}, {})}, // OPENING_BRACE {alt_tokens({ErrorBegin}, {})}, // OPENING_BRACKET {alt_tokens({ErrorBegin}, {})}, // CLOSING_BRACE {alt_tokens({ErrorBegin}, {})}, // CLOSING_BRACKET {alt_tokens({ErrorBegin}, {})}, // QUOTE {alt_tokens({ErrorBegin}, {})}, // ESCAPE {alt_tokens({ErrorBegin}, {})}, // COMMA {alt_tokens({ErrorBegin}, {})}, // COLON {}, // WHITE_SPACE nl_tokens({}, {}), // LINE_BREAK {alt_tokens({ErrorBegin}, {})}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ListEnd}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*STRUCT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {StructMemberEnd, StructEnd}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {StructMemberEnd}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_BFN)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*STRUCT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {StructEnd}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {StructMemberBegin, FieldNameBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}}}; // 
OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_FLN)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*STRUCT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {FieldNameEnd}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_FNE)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*STRUCT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_PFN)] = { { /*ROOT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*LIST*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {ErrorBegin}, // COLON {ErrorBegin}, // WHITE_SPACE nl_tokens({ErrorBegin}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}, // OTHER /*STRUCT*/ {ErrorBegin}, // OPENING_BRACE {ErrorBegin}, // OPENING_BRACKET {ErrorBegin}, // CLOSING_BRACE {ErrorBegin}, // CLOSING_BRACKET {ErrorBegin}, // QUOTE {ErrorBegin}, // ESCAPE {ErrorBegin}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {ErrorBegin}), // LINE_BREAK {ErrorBegin}}}; // OTHER pda_tlt[static_cast<StateT>(pda_state_t::PD_ERR)] = {{ /*ROOT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {}), // LINE_BREAK {}, // OTHER /*LIST*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {}), // LINE_BREAK {}, // OTHER /*STRUCT*/ {}, // OPENING_BRACE {}, // OPENING_BRACKET {}, // CLOSING_BRACE {}, // CLOSING_BRACKET {}, // 
QUOTE {}, // ESCAPE {}, // COMMA {}, // COLON {}, // WHITE_SPACE nl_tokens({}, {}), // LINE_BREAK {}}}; // OTHER return pda_tlt; } } // namespace tokenizer_pda
/** * @brief Function object used to filter for brackets and braces that represent push and pop * operations */ struct JSONToStackOp { template <typename StackSymbolT> constexpr CUDF_HOST_DEVICE fst::stack_op_type operator()(StackSymbolT const& stack_symbol) const { switch (stack_symbol) { case '{': case '[': return fst::stack_op_type::PUSH; case '}': case ']': return fst::stack_op_type::POP; default: return fst::stack_op_type::READ; } } };
/** * @brief Function object used to filter for brackets and braces that represent push and pop * operations, as well as newlines, which represent stack reset operations */ struct JSONWithRecoveryToStackOp { template <typename StackSymbolT> constexpr CUDF_HOST_DEVICE fst::stack_op_type operator()(StackSymbolT const& stack_symbol) const { switch (stack_symbol) { case '{': case '[': return fst::stack_op_type::PUSH; case '}': case ']': return fst::stack_op_type::POP; case '\n': return fst::stack_op_type::RESET; default: return fst::stack_op_type::READ; } } };
void json_column::null_fill(row_offset_t up_to_row_offset) { // Fill all the rows up to up_to_row_offset with "empty"/null rows validity.resize(word_index(up_to_row_offset) + 1); std::fill_n(std::back_inserter(string_offsets), up_to_row_offset - string_offsets.size(), (string_offsets.size() > 0) ? string_offsets.back() : 0); std::fill_n(std::back_inserter(string_lengths), up_to_row_offset - string_lengths.size(), 0); std::fill_n(std::back_inserter(child_offsets), up_to_row_offset + 1 - child_offsets.size(), (child_offsets.size() > 0) ? child_offsets.back() : 0); current_offset = up_to_row_offset; }
void json_column::level_child_cols_recursively(row_offset_t min_row_count) { // Fill this column with nulls up to the given row count null_fill(min_row_count); // If this is a struct column, we need to level all its child columns if (type == json_col_t::StructColumn) { for (auto it = std::begin(child_columns); it != std::end(child_columns); it++) { it->second.level_child_cols_recursively(min_row_count); } } // If this is a list column, we need to make sure that its child column levels its children else if (type == json_col_t::ListColumn) { auto it = std::begin(child_columns); // Make sure that child column fills its child columns up to its own row count if (it != std::end(child_columns)) { it->second.level_child_cols_recursively(it->second.current_offset); } } };
void json_column::append_row(uint32_t row_index, json_col_t row_type, uint32_t string_offset, uint32_t string_end, uint32_t child_count) { // If, thus far, the column's type couldn't be inferred, we infer it to the given type if (type == json_col_t::Unknown) { type = row_type; } // If, at some point within a column, we encounter a nested type (list or struct), // we change that column's type to that respective nested type and invalidate all previous rows else if (type == json_col_t::StringColumn && (row_type == json_col_t::ListColumn || row_type == json_col_t::StructColumn)) { // Change the column type type = row_type; // Invalidate all previous entries, as they were _not_ of the nested type to which we just // converted std::fill_n(validity.begin(), validity.size(), 0); valid_count = 0U; } // If this is a nested column but we're trying to insert either (a) a list node into a struct // column or (b) a struct node into a list column, we fail CUDF_EXPECTS(not((type == json_col_t::ListColumn and row_type == json_col_t::StructColumn) or (type ==
json_col_t::StructColumn and row_type == json_col_t::ListColumn)), "A mix of lists and structs within the same column is not supported"); // We shouldn't run into this, as we shouldn't be asked to append an "unknown" row type CUDF_EXPECTS(type != json_col_t::Unknown, "Encountered invalid JSON token sequence"); // Fill all the omitted rows with "empty"/null rows (if needed) null_fill(row_index);
// Table listing what we intend to use for a given column type and row type combination // col type | row type => {valid, FAIL, null} // ----------------------------------------------- // List | List => valid // List | Struct => FAIL // List | String => null // Struct | List => FAIL // Struct | Struct => valid // Struct | String => null // String | List => valid (we switch col type to list, null'ing all previous rows) // String | Struct => valid (we switch col type to struct, null'ing all previous rows) // String | String => valid
bool const is_valid = (type == row_type); if (static_cast<size_type>(validity.size()) < word_index(current_offset)) validity.push_back({}); if (is_valid) { set_bit_unsafe(&validity.back(), intra_word_index(current_offset)); } valid_count += (is_valid) ? 1U : 0U; string_offsets.push_back(string_offset); string_lengths.push_back(string_end - string_offset); child_offsets.push_back((child_offsets.size() > 0) ? child_offsets.back() + child_count : 0); current_offset++; };
namespace detail { void get_stack_context(device_span<SymbolT const> json_in, SymbolT* d_top_of_stack, stack_behavior_t stack_behavior, rmm::cuda_stream_view stream) { check_input_size(json_in.size()); // Range of encapsulating function that comprises: // -> DFA simulation for filtering out brackets and braces inside of quotes // -> Logical stack to infer the stack context CUDF_FUNC_RANGE();
// Symbol representing the JSON-root (i.e., we're at nesting level '0') constexpr StackSymbolT root_symbol = '_'; // This can be any stack symbol from the stack alphabet that does not push onto stack constexpr StackSymbolT read_symbol = 'x';
// Number of stack operations in the input (i.e., number of '{', '}', '[', ']' outside of quotes) rmm::device_scalar<SymbolOffsetT> d_num_stack_ops(stream);
// Sequence of stack symbols and their position in the original input (sparse representation) rmm::device_uvector<StackSymbolT> stack_ops{json_in.size(), stream}; rmm::device_uvector<SymbolOffsetT> stack_op_indices{json_in.size(), stream};
// Prepare finite-state transducer that only selects '{', '}', '[', ']' outside of quotes constexpr auto max_translation_table_size = to_stack_op::NUM_SYMBOL_GROUPS * to_stack_op::TT_NUM_STATES; // Transition table specialized on the choice of whether to reset on newlines const auto transition_table = (stack_behavior == stack_behavior_t::ResetOnDelimiter) ? to_stack_op::resetting_transition_table : to_stack_op::transition_table;
// Translation table specialized on the choice of whether to reset on newlines const auto translation_table = (stack_behavior == stack_behavior_t::ResetOnDelimiter) ?
to_stack_op::resetting_translation_table : to_stack_op::translation_table; auto json_to_stack_ops_fst = fst::detail::make_fst( fst::detail::make_symbol_group_lut(to_stack_op::symbol_groups), fst::detail::make_transition_table(transition_table), fst::detail::make_translation_table<max_translation_table_size>(translation_table), stream); // "Search" for relevant occurrence of brackets and braces that indicate the beginning/end // of structs/lists json_to_stack_ops_fst.Transduce(json_in.begin(), static_cast<SymbolOffsetT>(json_in.size()), stack_ops.data(), stack_op_indices.data(), d_num_stack_ops.data(), to_stack_op::start_state, stream); // Copy back to actual number of stack operations auto const num_stack_ops = d_num_stack_ops.value(stream); // Stack operations with indices are converted to top of the stack for each character in the input if (stack_behavior == stack_behavior_t::ResetOnDelimiter) { fst::sparse_stack_op_to_top_of_stack<fst::stack_op_support::WITH_RESET_SUPPORT, StackLevelT>( stack_ops.data(), device_span<SymbolOffsetT>{stack_op_indices.data(), num_stack_ops}, JSONWithRecoveryToStackOp{}, d_top_of_stack, root_symbol, read_symbol, json_in.size(), stream); } else { fst::sparse_stack_op_to_top_of_stack<fst::stack_op_support::NO_RESET_SUPPORT, StackLevelT>( stack_ops.data(), device_span<SymbolOffsetT>{stack_op_indices.data(), num_stack_ops}, JSONToStackOp{}, d_top_of_stack, root_symbol, read_symbol, json_in.size(), stream); } } std::pair<rmm::device_uvector<PdaTokenT>, rmm::device_uvector<SymbolOffsetT>> process_token_stream( device_span<PdaTokenT const> tokens, device_span<SymbolOffsetT const> token_indices, rmm::cuda_stream_view stream) { // Instantiate FST for post-processing the token stream to remove all tokens that belong to an // invalid JSON line token_filter::UnwrapTokenFromSymbolOp sgid_op{}; auto filter_fst = fst::detail::make_fst(fst::detail::make_symbol_group_lut(token_filter::symbol_groups, sgid_op), fst::detail::make_transition_table(token_filter::transition_table), fst::detail::make_translation_functor(token_filter::TransduceToken{}), stream); auto const mr = rmm::mr::get_current_device_resource(); rmm::device_scalar<SymbolOffsetT> d_num_selected_tokens(stream, mr); rmm::device_uvector<PdaTokenT> filtered_tokens_out{tokens.size(), stream, mr}; rmm::device_uvector<SymbolOffsetT> filtered_token_indices_out{tokens.size(), stream, mr}; // The FST is run on the reverse token stream, discarding all tokens between ErrorBegin and the // next LineEnd (LineEnd, inv_token_0, inv_token_1, ..., inv_token_n, ErrorBegin, LineEnd, ...), // emitting a [StructBegin, StructEnd] pair on the end of such an invalid line. In that example, // inv_token_i for i in [0, n] together with the ErrorBegin are removed and replaced with // StructBegin, StructEnd. 
Also, all LineEnd are removed as well, as these are not relevant after // this stage anymore filter_fst.Transduce( thrust::make_reverse_iterator(thrust::make_zip_iterator(tokens.data(), token_indices.data()) + tokens.size()), static_cast<SymbolOffsetT>(tokens.size()), thrust::make_reverse_iterator( thrust::make_zip_iterator(filtered_tokens_out.data(), filtered_token_indices_out.data()) + tokens.size()), thrust::make_discard_iterator(), d_num_selected_tokens.data(), token_filter::start_state, stream); auto const num_total_tokens = d_num_selected_tokens.value(stream); rmm::device_uvector<PdaTokenT> tokens_out{num_total_tokens, stream, mr}; rmm::device_uvector<SymbolOffsetT> token_indices_out{num_total_tokens, stream, mr}; thrust::copy(rmm::exec_policy(stream), filtered_tokens_out.end() - num_total_tokens, filtered_tokens_out.end(), tokens_out.data()); thrust::copy(rmm::exec_policy(stream), filtered_token_indices_out.end() - num_total_tokens, filtered_token_indices_out.end(), token_indices_out.data()); return std::make_pair(std::move(tokens_out), std::move(token_indices_out)); } std::pair<rmm::device_uvector<PdaTokenT>, rmm::device_uvector<SymbolOffsetT>> get_token_stream( device_span<SymbolT const> json_in, cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { check_input_size(json_in.size()); // Range of encapsulating function that parses to internal columnar data representation CUDF_FUNC_RANGE(); auto const new_line_delimited_json = options.is_enabled_lines(); // (!new_line_delimited_json) => JSON // (new_line_delimited_json and recover_from_error) => JSON_LINES_RECOVER // (new_line_delimited_json and !recover_from_error) => JSON_LINES auto format = new_line_delimited_json ? (options.recovery_mode() == json_recovery_mode_t::RECOVER_WITH_NULL ? tokenizer_pda::json_format_cfg_t::JSON_LINES_RECOVER : tokenizer_pda::json_format_cfg_t::JSON_LINES) : tokenizer_pda::json_format_cfg_t::JSON; // Prepare for PDA transducer pass, merging input symbols with stack symbols auto const recover_from_error = (format == tokenizer_pda::json_format_cfg_t::JSON_LINES_RECOVER); // Memory holding the top-of-stack stack context for the input rmm::device_uvector<StackSymbolT> stack_symbols{json_in.size(), stream}; // Identify what is the stack context for each input character (JSON-root, struct, or list) auto const stack_behavior = recover_from_error ? stack_behavior_t::ResetOnDelimiter : stack_behavior_t::PushPopWithoutReset; get_stack_context(json_in, stack_symbols.data(), stack_behavior, stream); // Input to the full pushdown automaton finite-state transducer, where a input symbol comprises // the combination of a character from the JSON input together with the stack context for that // character. auto zip_in = thrust::make_zip_iterator(json_in.data(), stack_symbols.data()); // Spark, as the main stakeholder in the `recover_from_error` option, has the specific need to // ignore any characters that follow the first value on each JSON line. This is an FST that // fixes the stack context for those excess characters. 
That is, that all those excess characters // will be interpreted in the root stack context if (recover_from_error) { auto fix_stack_of_excess_chars = fst::detail::make_fst( fst::detail::make_symbol_group_lookup_op( fix_stack_of_excess_chars::SymbolPairToSymbolGroupId{}), fst::detail::make_transition_table(fix_stack_of_excess_chars::transition_table), fst::detail::make_translation_functor(fix_stack_of_excess_chars::TransduceInputOp{}), stream); fix_stack_of_excess_chars.Transduce(zip_in, static_cast<SymbolOffsetT>(json_in.size()), stack_symbols.data(), thrust::make_discard_iterator(), thrust::make_discard_iterator(), fix_stack_of_excess_chars::start_state, stream); } constexpr auto max_translation_table_size = tokenizer_pda::NUM_PDA_SGIDS * static_cast<tokenizer_pda::StateT>(tokenizer_pda::pda_state_t::PD_NUM_STATES); auto json_to_tokens_fst = fst::detail::make_fst( fst::detail::make_symbol_group_lookup_op(tokenizer_pda::PdaSymbolToSymbolGroupId{}), fst::detail::make_transition_table(tokenizer_pda::get_transition_table(format)), fst::detail::make_translation_table<max_translation_table_size>( tokenizer_pda::get_translation_table(recover_from_error)), stream); // Perform a PDA-transducer pass // Compute the maximum amount of tokens that can possibly be emitted for a given input size // Worst case ratio of tokens per input char is given for a struct with an empty field name, that // may be arbitrarily deeply nested: {"":_}, where '_' is a placeholder for any JSON value, // possibly another such struct. That is, 6 tokens for 5 chars (plus chars and tokens of '_') std::size_t constexpr min_chars_per_struct = 5; std::size_t constexpr max_tokens_per_struct = 6; auto const max_token_out_count = cudf::util::div_rounding_up_safe(json_in.size(), min_chars_per_struct) * max_tokens_per_struct; rmm::device_scalar<std::size_t> num_written_tokens{stream}; // In case we're recovering on invalid JSON lines, post-processing the token stream requires to // see a JSON-line delimiter as the very first item SymbolOffsetT const delimiter_offset = (format == tokenizer_pda::json_format_cfg_t::JSON_LINES_RECOVER ? 1 : 0); rmm::device_uvector<PdaTokenT> tokens{max_token_out_count + delimiter_offset, stream, mr}; rmm::device_uvector<SymbolOffsetT> tokens_indices{ max_token_out_count + delimiter_offset, stream, mr}; json_to_tokens_fst.Transduce(zip_in, static_cast<SymbolOffsetT>(json_in.size()), tokens.data() + delimiter_offset, tokens_indices.data() + delimiter_offset, num_written_tokens.data(), tokenizer_pda::start_state, stream); auto const num_total_tokens = num_written_tokens.value(stream) + delimiter_offset; tokens.resize(num_total_tokens, stream); tokens_indices.resize(num_total_tokens, stream); if (delimiter_offset == 1) { tokens.set_element(0, token_t::LineEnd, stream); auto [filtered_tokens, filtered_tokens_indices] = process_token_stream(tokens, tokens_indices, stream); tokens = std::move(filtered_tokens); tokens_indices = std::move(filtered_tokens_indices); } CUDF_EXPECTS(num_total_tokens <= max_token_out_count, "Generated token count exceeds the expected token count"); return std::make_pair(std::move(tokens), std::move(tokens_indices)); } /** * @brief Parses the given JSON string and generates a tree representation of the given input. 
 * * @param[in,out] root_column The root column of the hierarchy of columns into which data is parsed * @param[in,out] current_data_path The stack representing the path from the JSON root node to the * first node encountered in \p input * @param[in] input The JSON input in host memory * @param[in] d_input The JSON input in device memory * @param[in] options Parsing options specifying the parsing behaviour * @param[in] include_quote_char Whether to include the original quote chars around string values, * which allows distinguishing string values from numeric and literal values * @param[in] stream The CUDA stream to which kernels are dispatched * @param[in] mr Optional, resource with which to allocate */ void make_json_column(json_column& root_column, std::stack<tree_node>& current_data_path, host_span<SymbolT const> input, device_span<SymbolT const> d_input, cudf::io::json_reader_options const& options, bool include_quote_char, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // Range of encapsulating function that parses to internal columnar data representation CUDF_FUNC_RANGE();
// Parse the JSON and get the token stream auto const [d_tokens_gpu, d_token_indices_gpu] = get_token_stream(d_input, options, stream, mr); // Copy the JSON tokens to the host thrust::host_vector<PdaTokenT> tokens = cudf::detail::make_host_vector_async(d_tokens_gpu, stream); thrust::host_vector<SymbolOffsetT> token_indices_gpu = cudf::detail::make_host_vector_async(d_token_indices_gpu, stream);
// Make sure tokens have been copied to the host stream.synchronize();
// Whether this token is a valid token to begin the JSON document with auto is_valid_root_token = [](PdaTokenT const token) { switch (token) { case token_t::StructBegin: case token_t::ListBegin: case token_t::StringBegin: case token_t::ValueBegin: return true; default: return false; }; };
// Returns the token's corresponding column type auto token_to_column_type = [](PdaTokenT const token) { switch (token) { case token_t::StructBegin: return json_col_t::StructColumn; case token_t::ListBegin: return json_col_t::ListColumn; case token_t::StringBegin: return json_col_t::StringColumn; case token_t::ValueBegin: return json_col_t::StringColumn; default: return json_col_t::Unknown; }; };
// Depending on whether we want to include the quotes of strings or not, respectively, we: // (a) strip off the beginning quote included in StringBegin and FieldNameBegin or // (b) include the end quote excluded from StringEnd and strip off the beginning quote // included in FieldNameBegin auto get_token_index = [include_quote_char](PdaTokenT const token, SymbolOffsetT const token_index) { constexpr SymbolOffsetT quote_char_size = 1; switch (token) { // Optionally strip off quote char included for StringBegin case token_t::StringBegin: return token_index + (include_quote_char ? 0 : quote_char_size); // Optionally include trailing quote char for string values excluded for StringEnd case token_t::StringEnd: return token_index + (include_quote_char ?
quote_char_size : 0); // Strip off quote char included for FieldNameBegin case token_t::FieldNameBegin: return token_index + quote_char_size; default: return token_index; }; };
// The end-of-* partner token for a given beginning-of-* token auto end_of_partner = [](PdaTokenT const token) { switch (token) { case token_t::StringBegin: return token_t::StringEnd; case token_t::ValueBegin: return token_t::ValueEnd; case token_t::FieldNameBegin: return token_t::FieldNameEnd; default: return token_t::ErrorBegin; }; };
#ifdef NJP_DEBUG_PRINT auto column_type_string = [](json_col_t column_type) { switch (column_type) { case json_col_t::Unknown: return "Unknown"; case json_col_t::ListColumn: return "List"; case json_col_t::StructColumn: return "Struct"; case json_col_t::StringColumn: return "String"; default: return "Unknown"; } };
auto token_to_string = [](PdaTokenT token_type) { switch (token_type) { case token_t::StructBegin: return "StructBegin"; case token_t::StructEnd: return "StructEnd"; case token_t::ListBegin: return "ListBegin"; case token_t::ListEnd: return "ListEnd"; case token_t::StructMemberBegin: return "StructMemberBegin"; case token_t::StructMemberEnd: return "StructMemberEnd"; case token_t::FieldNameBegin: return "FieldNameBegin"; case token_t::FieldNameEnd: return "FieldNameEnd"; case token_t::StringBegin: return "StringBegin"; case token_t::StringEnd: return "StringEnd"; case token_t::ValueBegin: return "ValueBegin"; case token_t::ValueEnd: return "ValueEnd"; case token_t::ErrorBegin: return "ErrorBegin"; case token_t::LineEnd: return "LineEnd"; default: return "Unknown"; } }; #endif
/** * @brief Updates the given row in the given column with a new string_end and child_count. In * particular, updating the child count is relevant for list columns. */ auto update_row = [](json_column* column, uint32_t row_index, uint32_t string_end, uint32_t child_count) { #ifdef NJP_DEBUG_PRINT std::cout << " -> update_row()\n"; std::cout << " ---> col@" << column << "\n"; std::cout << " ---> row #" << row_index << "\n"; std::cout << " ---> string_lengths = " << (string_end - column->string_offsets[row_index]) << "\n"; std::cout << " ---> child_offsets = " << (column->child_offsets[row_index + 1] + child_count) << "\n"; #endif column->string_lengths[row_index] = (string_end - column->string_offsets[row_index]); column->child_offsets[row_index + 1] = column->child_offsets[row_index + 1] + child_count; };
/** * @brief Gets the currently selected child column given a \p current_data_path. * * That is, if \p current_data_path top-of-stack is * (a) a struct, the selected child column corresponds to the child column of the last field name * node encountered. * (b) a list, the selected child column corresponds to the single child column of * the list column. In this case, the child column may not exist yet.
*/ auto get_selected_column = [](std::stack<tree_node>& current_data_path) { json_column* selected_col = current_data_path.top().current_selected_col; // If the node does not have a selected column yet if (selected_col == nullptr) { // We're looking at the child column of a list column if (current_data_path.top().column->type == json_col_t::ListColumn) { CUDF_EXPECTS(current_data_path.top().column->child_columns.size() <= 1, "Encountered a list column with more than a single child column"); // The child column has yet to be created if (current_data_path.top().column->child_columns.empty()) { current_data_path.top().column->child_columns.emplace(std::string{list_child_name}, json_column{json_col_t::Unknown}); current_data_path.top().column->column_order.push_back(list_child_name); } current_data_path.top().current_selected_col = &current_data_path.top().column->child_columns.begin()->second; selected_col = current_data_path.top().current_selected_col; } else { CUDF_FAIL("Trying to retrieve child column without encountering a field name."); } } #ifdef NJP_DEBUG_PRINT std::cout << " -> get_selected_column()\n"; std::cout << " ---> selected col@" << selected_col << "\n"; #endif return selected_col; }; /** * @brief Returns a pointer to the child column with the given \p field_name within the current * struct column. */ auto select_column = [](std::stack<tree_node>& current_data_path, std::string const& field_name) { #ifdef NJP_DEBUG_PRINT std::cout << " -> select_column(" << field_name << ")\n"; #endif // The field name's parent struct node auto& current_struct_node = current_data_path.top(); // Verify that the field name node is actually a child of a struct CUDF_EXPECTS(current_data_path.top().column->type == json_col_t::StructColumn, "Invalid JSON token sequence"); json_column* struct_col = current_struct_node.column; auto const& child_col_it = struct_col->child_columns.find(field_name); // The field name's column exists already, select that as the struct node's currently selected // child column if (child_col_it != struct_col->child_columns.end()) { return &child_col_it->second; } // The field name's column does not exist yet, so we have to append the child column to the // struct column struct_col->column_order.push_back(field_name); return &struct_col->child_columns.emplace(field_name, json_column{}).first->second; }; /** * @brief Gets the row offset at which to insert. I.e., for a child column of a list column, we * just have to append the row to the end. Otherwise we have to propagate the row offset from the * parent struct column. */ auto get_target_row_index = [](std::stack<tree_node> const& current_data_path, json_column* target_column) { #ifdef NJP_DEBUG_PRINT std::cout << " -> target row: " << ((current_data_path.top().column->type == json_col_t::ListColumn) ? target_column->current_offset : current_data_path.top().row_index) << "\n"; #endif return (current_data_path.top().column->type == json_col_t::ListColumn) ? 
target_column->current_offset : current_data_path.top().row_index; };
// The offset of the token currently being processed std::size_t offset = 0;
// Giving names to magic constants constexpr uint32_t zero_child_count = 0;
CUDF_EXPECTS(tokens.size() == token_indices_gpu.size(), "Unexpected mismatch in number of token types and token indices"); CUDF_EXPECTS(tokens.size() > 0, "Empty JSON input not supported");
// The JSON root may only be a struct, list, string, or value node CUDF_EXPECTS(is_valid_root_token(tokens[offset]), "Invalid beginning of JSON document");
while (offset < tokens.size()) { // Verify there's at least the JSON root node left on the stack to which we can append data CUDF_EXPECTS(current_data_path.size() > 0, "Invalid JSON structure"); // Verify that the current node in the tree (which becomes this node's parent) can have children CUDF_EXPECTS(current_data_path.top().column->type == json_col_t::ListColumn or current_data_path.top().column->type == json_col_t::StructColumn, "Invalid JSON structure");
// The token we're currently parsing auto const& token = tokens[offset]; #ifdef NJP_DEBUG_PRINT std::cout << "[" << token_to_string(token) << "]\n"; #endif
// StructBegin token if (token == token_t::StructBegin) { // Get this node's column. That is, the parent node's selected column: // (a) if parent is a list, then this will (create and) return the list's only child column // (b) if parent is a struct, then this will return the column selected by the last field name // encountered. json_column* selected_col = get_selected_column(current_data_path); // Get the row offset at which to insert auto const target_row_index = get_target_row_index(current_data_path, selected_col); // Increment parent's child count and insert this struct node into the data path current_data_path.top().num_children++; current_data_path.push({selected_col, target_row_index, nullptr, zero_child_count}); // Add this struct node to the current column selected_col->append_row(target_row_index, token_to_column_type(tokens[offset]), get_token_index(tokens[offset], token_indices_gpu[offset]), get_token_index(tokens[offset], token_indices_gpu[offset]), zero_child_count); }
// StructEnd token else if (token == token_t::StructEnd) { // Verify that this node is in fact a struct node (i.e., it was part of a struct column) CUDF_EXPECTS(current_data_path.top().column->type == json_col_t::StructColumn, "Broken invariant while parsing JSON"); CUDF_EXPECTS(current_data_path.top().column != nullptr, "Broken invariant while parsing JSON"); // Update row to account for string offset update_row(current_data_path.top().column, current_data_path.top().row_index, get_token_index(tokens[offset], token_indices_gpu[offset]), current_data_path.top().num_children); // Pop struct from the path stack current_data_path.pop(); }
// ListBegin token else if (token == token_t::ListBegin) { // Get the selected column json_column* selected_col = get_selected_column(current_data_path); // Get the row offset at which to insert auto const target_row_index = get_target_row_index(current_data_path, selected_col); // Increment parent's child count and insert this list node into the data path current_data_path.top().num_children++; current_data_path.push({selected_col, target_row_index, nullptr, zero_child_count}); // Add this list node to the current column selected_col->append_row(target_row_index, token_to_column_type(tokens[offset]), get_token_index(tokens[offset], token_indices_gpu[offset]), get_token_index(tokens[offset],
token_indices_gpu[offset]), zero_child_count); }
// ListEnd token else if (token == token_t::ListEnd) { // Verify that this node is in fact a list node (i.e., it was part of a list column) CUDF_EXPECTS(current_data_path.top().column->type == json_col_t::ListColumn, "Broken invariant while parsing JSON"); CUDF_EXPECTS(current_data_path.top().column != nullptr, "Broken invariant while parsing JSON"); // Update row to account for string offset update_row(current_data_path.top().column, current_data_path.top().row_index, get_token_index(tokens[offset], token_indices_gpu[offset]), current_data_path.top().num_children); // Pop list from the path stack current_data_path.pop(); }
// Error token else if (token == token_t::ErrorBegin) { #ifdef NJP_DEBUG_PRINT std::cout << "[ErrorBegin]\n"; std::cout << "@" << get_token_index(tokens[offset], token_indices_gpu[offset]); #endif CUDF_FAIL("Parser encountered an invalid format."); }
// FieldName, String, or Value (begin, end)-pair else if (token == token_t::FieldNameBegin or token == token_t::StringBegin or token == token_t::ValueBegin) { // Verify that this token has the right successor to build a correct (begin, end) token pair CUDF_EXPECTS((offset + 1) < tokens.size(), "Invalid JSON token sequence"); CUDF_EXPECTS(tokens[offset + 1] == end_of_partner(token), "Invalid JSON token sequence");
// The offset to the first symbol from the JSON input associated with the current token auto const& token_begin_offset = get_token_index(tokens[offset], token_indices_gpu[offset]); // The offset to one past the last symbol associated with the current token auto const& token_end_offset = get_token_index(tokens[offset + 1], token_indices_gpu[offset + 1]);
// FieldNameBegin // For the current struct node in the tree, select the child column corresponding to this // field name if (token == token_t::FieldNameBegin) { std::string field_name{input.data() + token_begin_offset, (token_end_offset - token_begin_offset)}; current_data_path.top().current_selected_col = select_column(current_data_path, field_name); }
// StringBegin // ValueBegin // As we currently parse to string columns there's no further differentiation else if (token == token_t::StringBegin or token == token_t::ValueBegin) { // Get the selected column json_column* selected_col = get_selected_column(current_data_path); // Get the row offset at which to insert auto const target_row_index = get_target_row_index(current_data_path, selected_col); current_data_path.top().num_children++; selected_col->append_row(target_row_index, token_to_column_type(token), token_begin_offset, token_end_offset, zero_child_count); } else { CUDF_FAIL("Unknown JSON token"); }
// As we've also consumed the end-of-* token, we advance the processed token offset by one offset++; } offset++; }
// Make sure all of a struct's child columns have the same length root_column.level_child_cols_recursively(root_column.current_offset); }
/** * @brief Retrieves the parse_options to be used for type inference and type casting * * @param options The reader options to influence the relevant type inference and type casting * options * @param stream The CUDA stream to which kernels are dispatched */ auto parsing_options(cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream) { auto parse_opts = cudf::io::parse_options{',', '\n', '\"', '.'}; parse_opts.dayfirst = options.is_enabled_dayfirst(); parse_opts.keepquotes = options.is_enabled_keep_quotes(); parse_opts.trie_true = cudf::detail::create_serialized_trie({"true"}, stream);
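// The serialized tries are device-side lookup structures used while inferring and casting value types; the two that follow match the "false" literal and the null-like literals "" and "null"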
parse_opts.trie_false = cudf::detail::create_serialized_trie({"false"}, stream); parse_opts.trie_na = cudf::detail::create_serialized_trie({"", "null"}, stream); return parse_opts; } std::pair<std::unique_ptr<column>, std::vector<column_name_info>> json_column_to_cudf_column( json_column const& json_col, device_span<SymbolT const> d_input, cudf::io::json_reader_options const& options, std::optional<schema_element> schema, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // Range of orchestrating/encapsulating function CUDF_FUNC_RANGE(); auto make_validity = [stream, mr](json_column const& json_col) -> std::pair<rmm::device_buffer, size_type> { return {rmm::device_buffer{json_col.validity.data(), bitmask_allocation_size_bytes(json_col.current_offset), stream, mr}, json_col.current_offset - json_col.valid_count}; }; auto get_child_schema = [schema](auto child_name) -> std::optional<schema_element> { if (schema.has_value()) { auto const result = schema.value().child_types.find(child_name); if (result != std::end(schema.value().child_types)) { return result->second; } } return {}; }; switch (json_col.type) { case json_col_t::StringColumn: { auto const col_size = json_col.string_offsets.size(); CUDF_EXPECTS(json_col.string_offsets.size() == json_col.string_lengths.size(), "string offset, string length mismatch"); // Move string_offsets and string_lengths to GPU rmm::device_uvector<json_column::row_offset_t> d_string_offsets = cudf::detail::make_device_uvector_async( json_col.string_offsets, stream, rmm::mr::get_current_device_resource()); rmm::device_uvector<json_column::row_offset_t> d_string_lengths = cudf::detail::make_device_uvector_async( json_col.string_lengths, stream, rmm::mr::get_current_device_resource()); // Prepare iterator that returns (string_offset, string_length)-tuples auto offset_length_it = thrust::make_zip_iterator(d_string_offsets.begin(), d_string_lengths.begin()); data_type target_type{}; if (schema.has_value()) { #ifdef NJP_DEBUG_PRINT std::cout << "-> explicit type: " << (schema.has_value() ? 
std::to_string(static_cast<int>(schema->type.id())) : "n/a"); #endif target_type = schema.value().type; } // Infer column type, if we don't have an explicit type for it else { target_type = cudf::io::detail::infer_data_type(parsing_options(options, stream).json_view(), d_input, offset_length_it, col_size, stream); } auto [result_bitmask, null_count] = make_validity(json_col); // Convert strings to the inferred data type auto col = parse_data(d_input.data(), offset_length_it, col_size, target_type, std::move(result_bitmask), null_count, parsing_options(options, stream).view(), stream, mr); // Reset nullable if we do not have nulls // This is to match the existing JSON reader's behaviour: // - Non-string columns will always be returned as nullable // - String columns will be returned as nullable, iff there's at least one null entry if (target_type.id() == type_id::STRING and col->null_count() == 0) { col->set_null_mask(rmm::device_buffer{0, stream, mr}, 0); } // For string columns return ["offsets", "char"] schema if (target_type.id() == type_id::STRING) { return {std::move(col), std::vector<column_name_info>{{"offsets"}, {"chars"}}}; } // Non-string leaf-columns (e.g., numeric) do not have child columns in the schema else { return {std::move(col), std::vector<column_name_info>{}}; } break; } case json_col_t::StructColumn: { std::vector<std::unique_ptr<column>> child_columns; std::vector<column_name_info> column_names{}; size_type num_rows{json_col.current_offset}; // Create children columns for (auto const& col_name : json_col.column_order) { auto const& col = json_col.child_columns.find(col_name); column_names.emplace_back(col->first); auto const& child_col = col->second; auto [child_column, names] = json_column_to_cudf_column( child_col, d_input, options, get_child_schema(col_name), stream, mr); CUDF_EXPECTS(num_rows == child_column->size(), "All children columns must have the same size"); child_columns.push_back(std::move(child_column)); column_names.back().children = names; } auto [result_bitmask, null_count] = make_validity(json_col); return { make_structs_column( num_rows, std::move(child_columns), null_count, std::move(result_bitmask), stream, mr), column_names}; break; } case json_col_t::ListColumn: { size_type num_rows = json_col.child_offsets.size(); std::vector<column_name_info> column_names{}; column_names.emplace_back("offsets"); column_names.emplace_back( json_col.child_columns.empty() ? list_child_name : json_col.child_columns.begin()->first); rmm::device_uvector<json_column::row_offset_t> d_offsets = cudf::detail::make_device_uvector_async(json_col.child_offsets, stream, mr); auto offsets_column = std::make_unique<column>( data_type{type_id::INT32}, num_rows, d_offsets.release(), rmm::device_buffer{}, 0); // Create children column auto [child_column, names] = json_col.child_columns.empty() ? 
std::pair<std::unique_ptr<column>, std::vector<column_name_info>>{std::make_unique<column>(), std::vector<column_name_info>{}} : json_column_to_cudf_column(json_col.child_columns.begin()->second, d_input, options, get_child_schema(json_col.child_columns.begin()->first), stream, mr); column_names.back().children = names; auto [result_bitmask, null_count] = make_validity(json_col); return {make_lists_column(num_rows - 1, std::move(offsets_column), std::move(child_column), null_count, std::move(result_bitmask), stream, mr), std::move(column_names)}; break; } default: CUDF_FAIL("Unsupported column type, yet to be implemented"); break; } return {}; } table_with_metadata host_parse_nested_json(device_span<SymbolT const> d_input, cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // Range of orchestrating/encapsulating function CUDF_FUNC_RANGE(); auto const h_input = cudf::detail::make_std_vector_async(d_input, stream); auto const new_line_delimited_json = options.is_enabled_lines(); // Get internal JSON column json_column root_column{}; std::stack<tree_node> data_path{}; constexpr uint32_t row_offset_zero = 0; constexpr uint32_t token_begin_offset_zero = 0; constexpr uint32_t token_end_offset_zero = 0; constexpr uint32_t node_init_child_count_zero = 0; // Whether the tokenizer stage should keep quote characters for string values // If the tokenizer keeps the quote characters, they may be stripped during type casting constexpr bool include_quote_chars = true; // We initialize the very root node and root column, which represent the JSON document being // parsed. That root node is a list node and that root column is a list column. The column has the // root node as its only row. The values parsed from the JSON input will be treated as follows: // (1) For JSON lines: we expect to find a list of JSON values that all // will be inserted into this root list column. (2) For regular JSON: we expect to have only a // single value (list, struct, string, number, literal) that will be inserted into this root // column. root_column.append_row( row_offset_zero, json_col_t::ListColumn, token_begin_offset_zero, token_end_offset_zero, 1); // Push the root node onto the stack for the data path data_path.push({&root_column, row_offset_zero, nullptr, node_init_child_count_zero}); make_json_column( root_column, data_path, h_input, d_input, options, include_quote_chars, stream, mr); // data_root refers to the root column of the data represented by the given JSON string auto const& data_root = new_line_delimited_json ? 
root_column : root_column.child_columns.begin()->second;
// Zero row entries if (data_root.type == json_col_t::ListColumn && data_root.child_columns.empty()) { return table_with_metadata{std::make_unique<table>(std::vector<std::unique_ptr<column>>{})}; }
// Verify that we were in fact given a list of structs (or in JSON parlance: an array of objects) auto constexpr single_child_col_count = 1; CUDF_EXPECTS(data_root.type == json_col_t::ListColumn and data_root.child_columns.size() == single_child_col_count and data_root.child_columns.begin()->second.type == json_col_t::StructColumn, "Currently the nested JSON parser only supports an array of (nested) objects");
// Slice off the root list column, which has only a single row that contains all the structs auto const& root_struct_col = data_root.child_columns.begin()->second;
// Initialize metadata to be populated while recursing through the tree of columns std::vector<std::unique_ptr<column>> out_columns; std::vector<column_name_info> out_column_names;
// Iterate over the struct's child columns and convert to cudf column size_type column_index = 0; for (auto const& col_name : root_struct_col.column_order) { auto const& json_col = root_struct_col.child_columns.find(col_name)->second; // Insert this column's name into the schema out_column_names.emplace_back(col_name);
std::optional<schema_element> child_schema_element = std::visit( cudf::detail::visitor_overload{ [column_index](std::vector<data_type> const& user_dtypes) -> std::optional<schema_element> { auto ret = (static_cast<std::size_t>(column_index) < user_dtypes.size()) ? std::optional<schema_element>{{user_dtypes[column_index]}} : std::optional<schema_element>{}; #ifdef NJP_DEBUG_PRINT std::cout << "Column by index: #" << column_index << ", type id: " << (ret.has_value() ? std::to_string(static_cast<int>(ret->type.id())) : "n/a") << ", with " << (ret.has_value() ? ret->child_types.size() : 0) << " children" << "\n"; #endif return ret; }, [col_name]( std::map<std::string, data_type> const& user_dtypes) -> std::optional<schema_element> { auto ret = (user_dtypes.find(col_name) != std::end(user_dtypes)) ? std::optional<schema_element>{{user_dtypes.find(col_name)->second}} : std::optional<schema_element>{}; #ifdef NJP_DEBUG_PRINT std::cout << "Column by flat name: '" << col_name << "', type id: " << (ret.has_value() ? std::to_string(static_cast<int>(ret->type.id())) : "n/a") << ", with " << (ret.has_value() ? ret->child_types.size() : 0) << " children" << "\n"; #endif return ret; }, [col_name](std::map<std::string, schema_element> const& user_dtypes) -> std::optional<schema_element> { auto ret = (user_dtypes.find(col_name) != std::end(user_dtypes)) ? user_dtypes.find(col_name)->second : std::optional<schema_element>{}; #ifdef NJP_DEBUG_PRINT std::cout << "Column by nested name: #" << col_name << ", type id: " << (ret.has_value() ? std::to_string(static_cast<int>(ret->type.id())) : "n/a") << ", with " << (ret.has_value() ?
ret->child_types.size() : 0) << " children" << "\n"; #endif return ret; }}, options.get_dtypes()); // Get this JSON column's cudf column and schema info auto [cudf_col, col_name_info] = json_column_to_cudf_column(json_col, d_input, options, child_schema_element, stream, mr); out_column_names.back().children = std::move(col_name_info); out_columns.emplace_back(std::move(cudf_col)); column_index++; } return table_with_metadata{std::make_unique<table>(std::move(out_columns)), {out_column_names}}; } } // namespace detail } // namespace cudf::io::json // Debug print flag #undef NJP_DEBUG_PRINT
0
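A note on the schema-resolution step in host_parse_nested_json above: options.get_dtypes() may hold a vector of types (lookup by column index), a flat name-to-type map, or a nested name-to-schema_element map, and the std::visit over cudf::detail::visitor_overload picks the matching lookup. Below is a minimal, self-contained host sketch of that overload-set idiom over a std::variant; the `overload` helper and string-valued dtypes are illustrative stand-ins (only two of the three alternatives are modeled), not cudf API.

#include <iostream>
#include <map>
#include <string>
#include <variant>
#include <vector>

// Minimal stand-in for cudf::detail::visitor_overload: folds a set of lambdas
// into a single callable with an overloaded operator().
template <typename... Fs>
struct overload : Fs... {
  using Fs::operator()...;
};
template <typename... Fs>
overload(Fs...) -> overload<Fs...>;

// Two of the three dtype representations the reader accepts, modeled with
// plain strings instead of cudf::data_type for brevity.
using dtypes_variant =
  std::variant<std::vector<std::string>, std::map<std::string, std::string>>;

int main()
{
  dtypes_variant const user_dtypes = std::map<std::string, std::string>{{"a", "int64"}};
  std::string const col_name       = "a";
  int const column_index           = 0;

  // Dispatch on whichever representation the caller provided, mirroring the
  // std::visit(visitor_overload{...}, options.get_dtypes()) call above.
  auto const resolved = std::visit(
    overload{[&](std::vector<std::string> const& by_index) {
               return column_index < static_cast<int>(by_index.size())
                        ? by_index[column_index]
                        : std::string{"n/a"};
             },
             [&](std::map<std::string, std::string> const& by_name) {
               auto const it = by_name.find(col_name);
               return it == by_name.end() ? std::string{"n/a"} : it->second;
             }},
    user_dtypes);

  std::cout << "column '" << col_name << "' -> " << resolved << '\n';  // prints int64
  return 0;
}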
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/byte_range_info.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/find.h> namespace cudf::io::json::detail { // Find the byte offset of the first occurrence of the delimiter, or -1 if not found. size_type find_first_delimiter(device_span<char const> d_data, char const delimiter, rmm::cuda_stream_view stream) { auto const first_delimiter_position = thrust::find(rmm::exec_policy(stream), d_data.begin(), d_data.end(), delimiter); return first_delimiter_position != d_data.end() ? first_delimiter_position - d_data.begin() : -1; } } // namespace cudf::io::json::detail
0
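The contract of find_first_delimiter above is simply "byte offset of the first occurrence, else -1"; the device implementation performs that linear search with thrust::find under the stream's execution policy. A host-side analogue with std::find, shown for reference (the function name and test inputs are illustrative):

#include <algorithm>
#include <cassert>
#include <string_view>

// Host-side analogue of find_first_delimiter: return the byte offset of the
// first occurrence of `delimiter`, or -1 when the buffer does not contain it.
long find_first_delimiter_host(std::string_view data, char delimiter)
{
  auto const it = std::find(data.begin(), data.end(), delimiter);
  return it != data.end() ? static_cast<long>(it - data.begin()) : -1L;
}

int main()
{
  assert(find_first_delimiter_host("{\"a\":1}\n{\"a\":2}", '\n') == 7);
  assert(find_first_delimiter_host("{\"a\":1}", '\n') == -1);
  return 0;
}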
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/read_json.hpp
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/io/datasource.hpp> #include <cudf/io/json.hpp> #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/mr/device/device_memory_resource.hpp> #include <memory> namespace cudf::io::json::detail { table_with_metadata read_json(host_span<std::unique_ptr<datasource>> sources, json_reader_options const& reader_opts, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); size_type find_first_delimiter(device_span<char const> d_data, char const delimiter, rmm::cuda_stream_view stream); size_type find_first_delimiter_in_chunk(host_span<std::unique_ptr<cudf::io::datasource>> sources, json_reader_options const& reader_opts, char const delimiter, rmm::cuda_stream_view stream); } // namespace cudf::io::json::detail
0
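These declarations support chunked reads of JSON Lines input: a byte-range read must snap to record boundaries so that no record is split across chunks or parsed twice. A simplified host sketch of that snapping rule, assuming '\n'-delimited records; record_range is a hypothetical helper, not cudf API, and the real reader additionally keeps searching subsequent chunks when a range contains no delimiter at all:

#include <cstddef>
#include <iostream>
#include <string>
#include <string_view>
#include <utility>

// Snap a nominal [offset, offset + size) byte range to whole JSON-lines
// records: start after the first '\n' at or beyond `offset` (offset 0 starts
// immediately) and end after the first '\n' at or beyond `offset + size`.
std::pair<std::size_t, std::size_t> record_range(std::string_view json_lines,
                                                 std::size_t offset,
                                                 std::size_t size)
{
  std::size_t begin = 0;
  if (offset != 0) {
    auto const first = json_lines.find('\n', offset);
    if (first == std::string_view::npos) {  // no record starts in this chunk
      return {json_lines.size(), json_lines.size()};
    }
    begin = first + 1;
  }
  auto const last = json_lines.find('\n', offset + size);
  auto const end  = last == std::string_view::npos ? json_lines.size() : last + 1;
  return {begin, end};
}

int main()
{
  std::string_view const data = "{\"a\":1}\n{\"a\":2}\n{\"a\":3}\n";  // 24 bytes
  // Two readers, each nominally responsible for half of the input; together
  // they parse every record exactly once.
  for (std::size_t offset : {std::size_t{0}, std::size_t{12}}) {
    auto const [begin, end] = record_range(data, offset, 12);
    std::cout << "reader@" << offset << " parses:\n" << data.substr(begin, end - begin);
  }
  return 0;
}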
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/json_tree.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nested_json.hpp" #include <io/utilities/hostdevice_vector.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/scatter.cuh> #include <cudf/detail/utilities/algorithm.cuh> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/hashing/detail/default_hash.cuh> #include <cudf/hashing/detail/hash_allocator.cuh> #include <cudf/hashing/detail/hashing.hpp> #include <cudf/hashing/detail/helper_functions.cuh> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <rmm/mr/device/polymorphic_allocator.hpp> #include <cub/device/device_radix_sort.cuh> #include <cuco/static_set.cuh> #include <thrust/binary_search.h> #include <thrust/copy.h> #include <thrust/count.h> #include <thrust/fill.h> #include <thrust/gather.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/discard_iterator.h> #include <thrust/iterator/permutation_iterator.h> #include <thrust/iterator/transform_output_iterator.h> #include <thrust/iterator/zip_iterator.h> #include <thrust/reduce.h> #include <thrust/remove.h> #include <thrust/scan.h> #include <thrust/sequence.h> #include <thrust/sort.h> #include <thrust/tabulate.h> #include <thrust/transform.h> #include <limits> namespace cudf::io::json { namespace detail { // The node that a token represents struct token_to_node { __device__ auto operator()(PdaTokenT const token) -> NodeT { switch (token) { case token_t::StructBegin: return NC_STRUCT; case token_t::ListBegin: return NC_LIST; case token_t::StringBegin: return NC_STR; case token_t::ValueBegin: return NC_STR; // NC_VAL; // NV_VAL is removed because type inference and // reduce_to_column_tree category collapsing takes care of this. case token_t::FieldNameBegin: return NC_FN; default: return NC_ERR; }; } }; // Convert token indices to node range for each valid node. 
struct node_ranges { device_span<PdaTokenT const> tokens; device_span<SymbolOffsetT const> token_indices; bool include_quote_char; __device__ auto operator()(size_type i) -> thrust::tuple<SymbolOffsetT, SymbolOffsetT> { // Whether a token expects to be followed by its respective end-of-* token partner auto const is_begin_of_section = [] __device__(PdaTokenT const token) { switch (token) { case token_t::StringBegin: case token_t::ValueBegin: case token_t::FieldNameBegin: return true; default: return false; }; }; // The end-of-* partner token for a given beginning-of-* token auto const end_of_partner = [] __device__(PdaTokenT const token) { switch (token) { case token_t::StringBegin: return token_t::StringEnd; case token_t::ValueBegin: return token_t::ValueEnd; case token_t::FieldNameBegin: return token_t::FieldNameEnd; default: return token_t::ErrorBegin; }; }; // Includes quote char for end-of-string token or Skips the quote char for // beginning-of-field-name token auto const get_token_index = [include_quote_char = include_quote_char] __device__( PdaTokenT const token, SymbolOffsetT const token_index) { constexpr SymbolOffsetT quote_char_size = 1; switch (token) { // Strip off quote char included for StringBegin case token_t::StringBegin: return token_index + (include_quote_char ? 0 : quote_char_size); // Strip off or Include trailing quote char for string values for StringEnd case token_t::StringEnd: return token_index + (include_quote_char ? quote_char_size : 0); // Strip off quote char included for FieldNameBegin case token_t::FieldNameBegin: return token_index + quote_char_size; default: return token_index; }; }; PdaTokenT const token = tokens[i]; // The section from the original JSON input that this token demarcates SymbolOffsetT range_begin = get_token_index(token, token_indices[i]); SymbolOffsetT range_end = range_begin + 1; // non-leaf, non-field nodes ignore this value. if (is_begin_of_section(token)) { if ((i + 1) < tokens.size() && end_of_partner(token) == tokens[i + 1]) { // Update the range_end for this pair of tokens range_end = get_token_index(tokens[i + 1], token_indices[i + 1]); } } return thrust::make_tuple(range_begin, range_end); } }; /** * @brief Returns stable sorted keys and its sorted order * * Uses cub stable radix sort. The order is internally generated, hence it saves a copy and memory. * Since the key and order is returned, using double buffer helps to avoid extra copy to user * provided output iterator. * * @tparam IndexType sorted order type * @tparam KeyType key type * @param keys keys to sort * @param stream CUDA stream used for device memory operations and kernel launches. 
* @return Sorted keys and indices producing that sorted order */ template <typename IndexType = size_t, typename KeyType> std::pair<rmm::device_uvector<KeyType>, rmm::device_uvector<IndexType>> stable_sorted_key_order( cudf::device_span<KeyType const> keys, rmm::cuda_stream_view stream) { CUDF_FUNC_RANGE(); // Determine temporary device storage requirements rmm::device_uvector<KeyType> keys_buffer1(keys.size(), stream); rmm::device_uvector<KeyType> keys_buffer2(keys.size(), stream); rmm::device_uvector<IndexType> order_buffer1(keys.size(), stream); rmm::device_uvector<IndexType> order_buffer2(keys.size(), stream); cub::DoubleBuffer<IndexType> order_buffer(order_buffer1.data(), order_buffer2.data()); cub::DoubleBuffer<KeyType> keys_buffer(keys_buffer1.data(), keys_buffer2.data()); size_t temp_storage_bytes = 0; cub::DeviceRadixSort::SortPairs( nullptr, temp_storage_bytes, keys_buffer, order_buffer, keys.size()); rmm::device_buffer d_temp_storage(temp_storage_bytes, stream); thrust::copy(rmm::exec_policy(stream), keys.begin(), keys.end(), keys_buffer1.begin()); thrust::sequence(rmm::exec_policy(stream), order_buffer1.begin(), order_buffer1.end()); cub::DeviceRadixSort::SortPairs(d_temp_storage.data(), temp_storage_bytes, keys_buffer, order_buffer, keys.size(), 0, sizeof(KeyType) * 8, stream.value()); return std::pair{keys_buffer.Current() == keys_buffer1.data() ? std::move(keys_buffer1) : std::move(keys_buffer2), order_buffer.Current() == order_buffer1.data() ? std::move(order_buffer1) : std::move(order_buffer2)}; } /** * @brief Propagate parent node to siblings from first sibling. * * @param node_levels Node levels of each node * @param parent_node_ids parent node ids initialized for first child of each push node, * and other siblings are initialized to -1. * @param stream CUDA stream used for device memory operations and kernel launches. */ void propagate_parent_to_siblings(cudf::device_span<TreeDepthT const> node_levels, cudf::device_span<NodeIndexT> parent_node_ids, rmm::cuda_stream_view stream) { CUDF_FUNC_RANGE(); auto [sorted_node_levels, sorted_order] = stable_sorted_key_order<size_type>(node_levels, stream); // instead of gather, using permutation_iterator, which is ~17% faster thrust::inclusive_scan_by_key( rmm::exec_policy(stream), sorted_node_levels.begin(), sorted_node_levels.end(), thrust::make_permutation_iterator(parent_node_ids.begin(), sorted_order.begin()), thrust::make_permutation_iterator(parent_node_ids.begin(), sorted_order.begin()), thrust::equal_to<TreeDepthT>{}, thrust::maximum<NodeIndexT>{}); } // Generates a tree representation of the given tokens, token_indices. 
tree_meta_t get_tree_representation(device_span<PdaTokenT const> tokens, device_span<SymbolOffsetT const> token_indices, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); // Whether a token does represent a node in the tree representation auto const is_node = [] __device__(PdaTokenT const token) -> bool { switch (token) { case token_t::StructBegin: case token_t::ListBegin: case token_t::StringBegin: case token_t::ValueBegin: case token_t::FieldNameBegin: case token_t::ErrorBegin: return true; default: return false; }; }; // Whether the token pops from the parent node stack auto const does_pop = [] __device__(PdaTokenT const token) -> bool { switch (token) { case token_t::StructMemberEnd: case token_t::StructEnd: case token_t::ListEnd: return true; default: return false; }; }; // Whether the token pushes onto the parent node stack auto const does_push = [] __device__(PdaTokenT const token) -> bool { switch (token) { case token_t::FieldNameBegin: case token_t::StructBegin: case token_t::ListBegin: return true; default: return false; }; }; // Look for ErrorBegin and report the point of error. if (auto const error_count = thrust::count(rmm::exec_policy(stream), tokens.begin(), tokens.end(), token_t::ErrorBegin); error_count > 0) { auto const error_location = thrust::find(rmm::exec_policy(stream), tokens.begin(), tokens.end(), token_t::ErrorBegin); SymbolOffsetT error_index; CUDF_CUDA_TRY( cudaMemcpyAsync(&error_index, token_indices.data() + thrust::distance(tokens.begin(), error_location), sizeof(SymbolOffsetT), cudaMemcpyDefault, stream.value())); stream.synchronize(); CUDF_FAIL("JSON Parser encountered an invalid format at location " + std::to_string(error_index)); } auto const num_tokens = tokens.size(); auto const num_nodes = thrust::count_if(rmm::exec_policy(stream), tokens.begin(), tokens.end(), is_node); // Node levels: transform_exclusive_scan, copy_if. rmm::device_uvector<TreeDepthT> node_levels(num_nodes, stream, mr); { rmm::device_uvector<TreeDepthT> token_levels(num_tokens, stream); auto const push_pop_it = thrust::make_transform_iterator( tokens.begin(), [does_push, does_pop] __device__(PdaTokenT const token) -> size_type { return does_push(token) - does_pop(token); }); thrust::exclusive_scan( rmm::exec_policy(stream), push_pop_it, push_pop_it + num_tokens, token_levels.begin()); auto const node_levels_end = cudf::detail::copy_if_safe(token_levels.begin(), token_levels.end(), tokens.begin(), node_levels.begin(), is_node, stream); CUDF_EXPECTS(thrust::distance(node_levels.begin(), node_levels_end) == num_nodes, "node level count mismatch"); } // Node parent ids: // previous push node_id transform, stable sort by level, segmented scan with Max, reorder. rmm::device_uvector<NodeIndexT> parent_node_ids(num_nodes, stream, mr); // This block of code is generalized logical stack algorithm. TODO: make this a separate function. { rmm::device_uvector<NodeIndexT> node_token_ids(num_nodes, stream); cudf::detail::copy_if_safe(thrust::make_counting_iterator<NodeIndexT>(0), thrust::make_counting_iterator<NodeIndexT>(0) + num_tokens, tokens.begin(), node_token_ids.begin(), is_node, stream); // previous push node_id // if previous node is a push, then i-1 // if previous node is FE, then i-2 (returns FB's index) // if previous node is SMB and its previous node is a push, then i-2 // eg. `{ SMB FB FE VB VE SME` -> `{` index as FB's parent. 
// else -1 auto const first_childs_parent_token_id = [tokens_gpu = tokens.begin()] __device__(auto i) -> NodeIndexT { if (i <= 0) { return -1; } if (tokens_gpu[i - 1] == token_t::StructBegin or tokens_gpu[i - 1] == token_t::ListBegin) { return i - 1; } else if (tokens_gpu[i - 1] == token_t::FieldNameEnd) { return i - 2; } else if (tokens_gpu[i - 1] == token_t::StructMemberBegin and (tokens_gpu[i - 2] == token_t::StructBegin || tokens_gpu[i - 2] == token_t::ListBegin)) { return i - 2; } else { return -1; } }; thrust::transform( rmm::exec_policy(stream), node_token_ids.begin(), node_token_ids.end(), parent_node_ids.begin(), [node_ids_gpu = node_token_ids.begin(), num_nodes, first_childs_parent_token_id] __device__( NodeIndexT const tid) -> NodeIndexT { auto const pid = first_childs_parent_token_id(tid); return pid < 0 ? parent_node_sentinel : thrust::lower_bound(thrust::seq, node_ids_gpu, node_ids_gpu + num_nodes, pid) - node_ids_gpu; // parent_node_sentinel is -1, useful for segmented max operation below }); } // Propagate parent node to siblings from first sibling - inplace. propagate_parent_to_siblings( cudf::device_span<TreeDepthT const>{node_levels.data(), node_levels.size()}, parent_node_ids, stream); // Node categories: copy_if with transform. rmm::device_uvector<NodeT> node_categories(num_nodes, stream, mr); auto const node_categories_it = thrust::make_transform_output_iterator(node_categories.begin(), token_to_node{}); auto const node_categories_end = cudf::detail::copy_if_safe(tokens.begin(), tokens.end(), node_categories_it, is_node, stream); CUDF_EXPECTS(node_categories_end - node_categories_it == num_nodes, "node category count mismatch"); // Node ranges: copy_if with transform. rmm::device_uvector<SymbolOffsetT> node_range_begin(num_nodes, stream, mr); rmm::device_uvector<SymbolOffsetT> node_range_end(num_nodes, stream, mr); auto const node_range_tuple_it = thrust::make_zip_iterator(node_range_begin.begin(), node_range_end.begin()); // Whether the tokenizer stage should keep quote characters for string values // If the tokenizer keeps the quote characters, they may be stripped during type casting constexpr bool include_quote_char = true; auto const node_range_out_it = thrust::make_transform_output_iterator( node_range_tuple_it, node_ranges{tokens, token_indices, include_quote_char}); auto const node_range_out_end = cudf::detail::copy_if_safe( thrust::make_counting_iterator<size_type>(0), thrust::make_counting_iterator<size_type>(0) + num_tokens, node_range_out_it, [is_node, tokens_gpu = tokens.begin()] __device__(size_type i) -> bool { return is_node(tokens_gpu[i]); }, stream); CUDF_EXPECTS(node_range_out_end - node_range_out_it == num_nodes, "node range count mismatch"); return {std::move(node_categories), std::move(parent_node_ids), std::move(node_levels), std::move(node_range_begin), std::move(node_range_end)}; } /** * @brief Generates unique node_type id for each node. * Field nodes with the same name are assigned the same node_type id. * List, Struct, and String nodes are assigned their category values as node_type ids. * * All inputs and outputs are in node_id order. * @param d_input JSON string in device memory * @param d_tree Tree representation of the JSON * @param stream CUDA stream used for device memory operations and kernel launches. 
* @return Vector of node_type ids */ rmm::device_uvector<size_type> hash_node_type_with_field_name(device_span<SymbolT const> d_input, tree_meta_t const& d_tree, rmm::cuda_stream_view stream) { CUDF_FUNC_RANGE(); using hash_table_allocator_type = rmm::mr::stream_allocator_adaptor<default_allocator<char>>; auto const num_nodes = d_tree.node_categories.size(); auto const num_fields = thrust::count(rmm::exec_policy(stream), d_tree.node_categories.begin(), d_tree.node_categories.end(), node_t::NC_FN); auto const d_hasher = [d_input = d_input.data(), node_range_begin = d_tree.node_range_begin.data(), node_range_end = d_tree.node_range_end.data()] __device__(auto node_id) { auto const field_name = cudf::string_view(d_input + node_range_begin[node_id], node_range_end[node_id] - node_range_begin[node_id]); return cudf::hashing::detail::default_hash<cudf::string_view>{}(field_name); }; auto const d_equal = [d_input = d_input.data(), node_range_begin = d_tree.node_range_begin.data(), node_range_end = d_tree.node_range_end.data()] __device__(auto node_id1, auto node_id2) { auto const field_name1 = cudf::string_view( d_input + node_range_begin[node_id1], node_range_end[node_id1] - node_range_begin[node_id1]); auto const field_name2 = cudf::string_view( d_input + node_range_begin[node_id2], node_range_end[node_id2] - node_range_begin[node_id2]); return field_name1 == field_name2; }; // key-value pairs: uses node_id itself as node_type. (unique node_id for a field name due to // hashing) auto const iter = thrust::make_counting_iterator<size_type>(0); auto const is_field_name_node = [node_categories = d_tree.node_categories.data()] __device__(auto node_id) { return node_categories[node_id] == node_t::NC_FN; }; using hasher_type = decltype(d_hasher); constexpr size_type empty_node_index_sentinel = -1; auto key_set = cuco::experimental::static_set{cuco::experimental::extent{compute_hash_table_size( num_fields, 40)}, // 40% occupancy in hash map cuco::empty_key{empty_node_index_sentinel}, d_equal, cuco::experimental::linear_probing<1, hasher_type>{d_hasher}, hash_table_allocator_type{default_allocator<char>{}, stream}, stream.value()}; key_set.insert_if_async(iter, iter + num_nodes, thrust::counting_iterator<size_type>(0), // stencil is_field_name_node, stream.value()); auto const get_hash_value = [key_set = key_set.ref(cuco::experimental::op::find)] __device__(auto node_id) -> size_type { auto const it = key_set.find(node_id); return (it == key_set.end()) ? size_type{0} : *it; }; // convert field nodes to node indices, and other nodes to enum value. rmm::device_uvector<size_type> node_type(num_nodes, stream); thrust::tabulate(rmm::exec_policy(stream), node_type.begin(), node_type.end(), [node_categories = d_tree.node_categories.data(), is_field_name_node, get_hash_value] __device__(auto node_id) -> size_type { if (is_field_name_node(node_id)) return static_cast<size_type>(NUM_NODE_CLASSES) + get_hash_value(node_id); else return static_cast<size_type>(node_categories[node_id]); }); return node_type; } std::pair<rmm::device_uvector<NodeIndexT>, rmm::device_uvector<NodeIndexT>> get_array_children_indices(TreeDepthT row_array_children_level, device_span<TreeDepthT const> node_levels, device_span<NodeIndexT const> parent_node_ids, rmm::cuda_stream_view stream) { // array children level: (level 2 for values, level 1 for values-JSONLines format) // copy nodes id of level 1's children (level 2) // exclusive scan by key (on key their parent_node_id, because we need indices in each row. 
// parent_node_id for each row will be same). // -> return their indices and their node id auto const num_nodes = node_levels.size(); auto num_level2_nodes = thrust::count( rmm::exec_policy(stream), node_levels.begin(), node_levels.end(), row_array_children_level); rmm::device_uvector<NodeIndexT> level2_nodes(num_level2_nodes, stream); rmm::device_uvector<NodeIndexT> level2_indices(num_level2_nodes, stream); auto const iter = thrust::copy_if(rmm::exec_policy(stream), thrust::counting_iterator<NodeIndexT>(0), thrust::counting_iterator<NodeIndexT>(num_nodes), node_levels.begin(), level2_nodes.begin(), [row_array_children_level] __device__(auto level) { return level == row_array_children_level; }); auto level2_parent_nodes = thrust::make_permutation_iterator(parent_node_ids.begin(), level2_nodes.cbegin()); thrust::exclusive_scan_by_key(rmm::exec_policy(stream), level2_parent_nodes, level2_parent_nodes + num_level2_nodes, thrust::make_constant_iterator(NodeIndexT{1}), level2_indices.begin()); return std::make_pair(std::move(level2_nodes), std::move(level2_indices)); } // Two level hashing algorithm // 1. Convert node_category+fieldname to node_type. (passed as argument) // a. Create a hashmap to hash field name and assign unique node id as values. // b. Convert the node categories to node types. // Node type is defined as node category enum value if it is not a field node, // otherwise it is the unique node id assigned by the hashmap (value shifted by #NUM_CATEGORY). // 2. Set operation on entire path of each node // a. Create a hash map with hash of {node_level, node_type} of its node and the entire parent // until root. // b. While creating hashmap, transform node id to unique node ids that are inserted into the // hash map. This mimics set operation with hash map. This unique node ids are set ids. // c. Return this converted set ids, which are the hash map keys/values, and unique set ids. std::pair<rmm::device_uvector<size_type>, rmm::device_uvector<size_type>> hash_node_path( device_span<TreeDepthT const> node_levels, device_span<size_type const> node_type, device_span<NodeIndexT const> parent_node_ids, bool is_array_of_arrays, bool is_enabled_lines, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto const num_nodes = parent_node_ids.size(); // array of arrays NodeIndexT const row_array_children_level = is_enabled_lines ? 1 : 2; rmm::device_uvector<size_type> list_indices(0, stream); if (is_array_of_arrays) { // For array of arrays, level 2 nodes do not have column name (field name). // So, we need to generate indices for each level 2 node w.r.t to that row, to uniquely // identify each level 2 node as separate column. // Example: // array of structs: [ { a: 1, b: 2}, { a: 3, b: 4} ] // levels: 0 1 2 3 2 3 1 2 3 2 3 // array of arrays: [ [ 1, 2], [ 3, 4] ] // levels: 0 1 2 2 1 2 2 // For example, in the above example, we need to generate indices for each level 2 node: // array of arrays: [ [ 1, 2], [ 3, 4] ] // levels: 0 1 2 2 1 2 2 // child indices: 0 1 0 1 // These indices uniquely identify each column in each row. This is used during hashing for // level 2 nodes to generate unique column ids, instead of field name for level 2 nodes. 
auto [level2_nodes, level2_indices] = get_array_children_indices(row_array_children_level, node_levels, parent_node_ids, stream); // memory usage could be reduced by using different data structure (hashmap) // or alternate method to hash it at node_type list_indices.resize(num_nodes, stream); thrust::scatter(rmm::exec_policy(stream), level2_indices.cbegin(), level2_indices.cend(), level2_nodes.cbegin(), list_indices.begin()); } // path compression is not used since extra writes make all map operations slow. auto const d_hasher = [node_level = node_levels.begin(), node_type = node_type.begin(), parent_node_ids = parent_node_ids.begin(), list_indices = list_indices.begin(), is_array_of_arrays, row_array_children_level] __device__(auto node_id) { auto hash = cudf::hashing::detail::hash_combine( cudf::hashing::detail::default_hash<TreeDepthT>{}(node_level[node_id]), cudf::hashing::detail::default_hash<size_type>{}(node_type[node_id])); node_id = parent_node_ids[node_id]; // Each node computes its hash by walking from its node up to the root. while (node_id != parent_node_sentinel) { hash = cudf::hashing::detail::hash_combine( hash, cudf::hashing::detail::default_hash<TreeDepthT>{}(node_level[node_id])); hash = cudf::hashing::detail::hash_combine( hash, cudf::hashing::detail::default_hash<size_type>{}(node_type[node_id])); if (is_array_of_arrays and node_level[node_id] == row_array_children_level) hash = cudf::hashing::detail::hash_combine(hash, list_indices[node_id]); node_id = parent_node_ids[node_id]; } return hash; }; rmm::device_uvector<hash_value_type> node_hash(num_nodes, stream); thrust::tabulate(rmm::exec_policy(stream), node_hash.begin(), node_hash.end(), d_hasher); auto const d_hashed_cache = [node_hash = node_hash.begin()] __device__(auto node_id) { return node_hash[node_id]; }; auto const d_equal = [node_level = node_levels.begin(), node_type = node_type.begin(), parent_node_ids = parent_node_ids.begin(), is_array_of_arrays, row_array_children_level, list_indices = list_indices.begin(), d_hashed_cache] __device__(auto node_id1, auto node_id2) { if (node_id1 == node_id2) return true; if (d_hashed_cache(node_id1) != d_hashed_cache(node_id2)) return false; auto const is_equal_level = [node_level, node_type, is_array_of_arrays, row_array_children_level, list_indices]( auto node_id1, auto node_id2) { if (node_id1 == node_id2) return true; auto const is_level2_equal = [&]() { if (!is_array_of_arrays) return true; return node_level[node_id1] != row_array_children_level or list_indices[node_id1] == list_indices[node_id2]; }(); return node_level[node_id1] == node_level[node_id2] and node_type[node_id1] == node_type[node_id2] and is_level2_equal; }; // if both nodes have same node types at all levels, it will check until it has common parent // or root. 
while (node_id1 != parent_node_sentinel and node_id2 != parent_node_sentinel and node_id1 != node_id2 and is_equal_level(node_id1, node_id2)) { node_id1 = parent_node_ids[node_id1]; node_id2 = parent_node_ids[node_id2]; } return node_id1 == node_id2; }; constexpr size_type empty_node_index_sentinel = -1; using hash_table_allocator_type = rmm::mr::stream_allocator_adaptor<default_allocator<char>>; using hasher_type = decltype(d_hashed_cache); auto key_set = cuco::experimental::static_set{ cuco::experimental::extent{compute_hash_table_size(num_nodes)}, cuco::empty_key<cudf::size_type>{empty_node_index_sentinel}, d_equal, cuco::experimental::linear_probing<1, hasher_type>{d_hashed_cache}, hash_table_allocator_type{default_allocator<char>{}, stream}, stream.value()}; // insert and convert node ids to unique set ids auto nodes_itr = thrust::make_counting_iterator<size_type>(0); auto const num_columns = key_set.insert(nodes_itr, nodes_itr + num_nodes, stream.value()); rmm::device_uvector<size_type> unique_keys(num_columns, stream); rmm::device_uvector<size_type> col_id(num_nodes, stream, mr); key_set.find_async(nodes_itr, nodes_itr + num_nodes, col_id.begin(), stream.value()); std::ignore = key_set.retrieve_all(unique_keys.begin(), stream.value()); return {std::move(col_id), std::move(unique_keys)}; } /** * @brief Generates column id and parent column id for each node * * 1. Generate col_id: * a. Set operation on entire path of each node, translate each node id to set id. * (two level hashing) * b. gather unique set ids. * c. sort and use binary search to generate column ids. * d. Translate parent node ids to parent column ids. * * All inputs and outputs are in node_id order. * @param d_input JSON string in device memory * @param d_tree Tree representation of the JSON * @param is_array_of_arrays Whether the tree is an array of arrays * @param is_enabled_lines Whether the input is a line-delimited JSON * @param stream CUDA stream used for device memory operations and kernel launches * @param mr Device memory resource used to allocate the returned column's device memory * @return column_id, parent_column_id */ std::pair<rmm::device_uvector<NodeIndexT>, rmm::device_uvector<NodeIndexT>> generate_column_id( device_span<SymbolT const> d_input, tree_meta_t const& d_tree, bool is_array_of_arrays, bool is_enabled_lines, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto const num_nodes = d_tree.node_categories.size(); // Two level hashing: // one for field names -> node_type and, // another for {node_level, node_category} + field hash for the entire path // which is {node_level, node_type} recursively using parent_node_id auto [col_id, unique_keys] = [&]() { // Convert node_category + field_name to node_type. rmm::device_uvector<size_type> node_type = hash_node_type_with_field_name(d_input, d_tree, stream); // hash entire path from node to root. return hash_node_path(d_tree.node_levels, node_type, d_tree.parent_node_ids, is_array_of_arrays, is_enabled_lines, stream, mr); }(); thrust::sort(rmm::exec_policy(stream), unique_keys.begin(), unique_keys.end()); thrust::lower_bound(rmm::exec_policy(stream), unique_keys.begin(), unique_keys.end(), col_id.begin(), col_id.end(), col_id.begin()); rmm::device_uvector<size_type> parent_col_id(num_nodes, stream, mr); thrust::transform(rmm::exec_policy(stream), d_tree.parent_node_ids.begin(), d_tree.parent_node_ids.end(), parent_col_id.begin(), [col_id = col_id.begin()] __device__(auto node_id) { return node_id >= 0 ? 
col_id[node_id] : parent_node_sentinel; }); return {std::move(col_id), std::move(parent_col_id)}; } /** * @brief Computes row indices of each node in the hierarchy. * 2. Generate row_offset. * a. Extract only list children * b. stable_sort by parent_col_id. * c. scan_by_key {parent_col_id} (done only on nodes who's parent is list) * d. propagate to non-list leaves from parent list node by recursion * * pre-condition: * d_tree.node_categories, d_tree.parent_node_ids, parent_col_id are in order of node_id. * post-condition: row_offsets is in order of node_id. * parent_col_id is moved and reused inside this function. * @param parent_col_id parent node's column id * @param d_tree Tree representation of the JSON string * @param is_array_of_arrays Whether the tree is an array of arrays * @param is_enabled_lines Whether the input is a line-delimited JSON * @param stream CUDA stream used for device memory operations and kernel launches. * @param mr Device memory resource used to allocate the returned column's device memory. * @return row_offsets */ rmm::device_uvector<size_type> compute_row_offsets(rmm::device_uvector<NodeIndexT>&& parent_col_id, tree_meta_t const& d_tree, bool is_array_of_arrays, bool is_enabled_lines, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto const num_nodes = d_tree.node_categories.size(); rmm::device_uvector<size_type> scatter_indices(num_nodes, stream); thrust::sequence(rmm::exec_policy(stream), scatter_indices.begin(), scatter_indices.end()); // array of arrays NodeIndexT const row_array_parent_level = is_enabled_lines ? 0 : 1; // condition is true if parent is not a list, or sentinel/root // Special case to return true if parent is a list and is_array_of_arrays is true auto is_non_list_parent = [node_categories = d_tree.node_categories.begin(), node_levels = d_tree.node_levels.begin(), is_array_of_arrays, row_array_parent_level] __device__(auto pnid) { return !(pnid == parent_node_sentinel || node_categories[pnid] == NC_LIST && (!is_array_of_arrays || node_levels[pnid] != row_array_parent_level)); }; // Extract only list children. (nodes who's parent is a list/root) auto const list_parent_end = thrust::remove_if(rmm::exec_policy(stream), thrust::make_zip_iterator(parent_col_id.begin(), scatter_indices.begin()), thrust::make_zip_iterator(parent_col_id.end(), scatter_indices.end()), d_tree.parent_node_ids.begin(), is_non_list_parent); auto const num_list_parent = thrust::distance( thrust::make_zip_iterator(parent_col_id.begin(), scatter_indices.begin()), list_parent_end); thrust::stable_sort_by_key(rmm::exec_policy(stream), parent_col_id.begin(), parent_col_id.begin() + num_list_parent, scatter_indices.begin()); rmm::device_uvector<size_type> row_offsets(num_nodes, stream, mr); // TODO is it possible to generate list child_offsets too here? // write only 1st child offset to parent node id child_offsets? thrust::exclusive_scan_by_key(rmm::exec_policy(stream), parent_col_id.begin(), parent_col_id.begin() + num_list_parent, thrust::make_constant_iterator<size_type>(1), row_offsets.begin()); // Using scatter instead of sort. 
auto& temp_storage = parent_col_id; // reuse parent_col_id as temp storage thrust::scatter(rmm::exec_policy(stream), row_offsets.begin(), row_offsets.begin() + num_list_parent, scatter_indices.begin(), temp_storage.begin()); row_offsets = std::move(temp_storage); // Propagate row offsets to non-list leaves from list's immediate children node by recursion thrust::transform_if( rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), thrust::make_counting_iterator<size_type>(num_nodes), row_offsets.begin(), [node_categories = d_tree.node_categories.data(), parent_node_ids = d_tree.parent_node_ids.begin(), row_offsets = row_offsets.begin(), is_non_list_parent] __device__(size_type node_id) { auto parent_node_id = parent_node_ids[node_id]; while (is_non_list_parent(parent_node_id)) { node_id = parent_node_id; parent_node_id = parent_node_ids[parent_node_id]; } return row_offsets[node_id]; }, [node_categories = d_tree.node_categories.data(), parent_node_ids = d_tree.parent_node_ids.begin(), is_non_list_parent] __device__(size_type node_id) { auto const parent_node_id = parent_node_ids[node_id]; return is_non_list_parent(parent_node_id); }); return row_offsets; } // This algorithm assigns a unique column id to each node in the tree. // The row offset is the row index of the node in that column id. // Algorithm: // 1. Generate col_id: // a. Set operation on entire path of each node, translate each node id to set id. // b. gather unique set ids. // c. sort and use binary search to generate column ids. // d. Translate parent node ids to parent column ids. // 2. Generate row_offset. // a. filter only list children // b. stable_sort by parent_col_id. // c. scan_by_key {parent_col_id} (done only on nodes whose parent is a list) // d. propagate to non-list leaves from parent list node by recursion std::tuple<rmm::device_uvector<NodeIndexT>, rmm::device_uvector<size_type>> records_orient_tree_traversal(device_span<SymbolT const> d_input, tree_meta_t const& d_tree, bool is_array_of_arrays, bool is_enabled_lines, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto [new_col_id, new_parent_col_id] = generate_column_id(d_input, d_tree, is_array_of_arrays, is_enabled_lines, stream, mr); auto row_offsets = compute_row_offsets( std::move(new_parent_col_id), d_tree, is_array_of_arrays, is_enabled_lines, stream, mr); return std::tuple{std::move(new_col_id), std::move(row_offsets)}; } } // namespace detail } // namespace cudf::io::json
0
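Much of json_tree.cu above follows one pattern: stable-sort node ids by some key, then run a keyed scan so that information flows from the first element of a group to its siblings. The snippet below is a small host model of propagate_parent_to_siblings under assumed example inputs: only the first child of each parent carries a parent id (the rest hold -1, the parent_node_sentinel), and a running max within each level, the host equivalent of the thrust::inclusive_scan_by_key with thrust::maximum used above, fills in the remainder.

#include <algorithm>
#include <cstdio>
#include <numeric>
#include <vector>

int main()
{
  // Node levels in node-id order, and parent ids where only the first sibling
  // of each parent is initialized (illustrative values).
  std::vector<int> levels  = {0, 1, 1, 1, 2, 2};
  std::vector<int> parents = {-1, 0, -1, -1, 3, -1};

  // Stable order of node ids sorted by level (stable_sorted_key_order's role).
  std::vector<int> order(levels.size());
  std::iota(order.begin(), order.end(), 0);
  std::stable_sort(order.begin(), order.end(),
                   [&](int a, int b) { return levels[a] < levels[b]; });

  // Inclusive max-scan keyed by level: each -1 entry inherits the running max
  // of its level, i.e. the parent id of the nearest preceding sibling.
  for (std::size_t i = 1; i < order.size(); ++i) {
    if (levels[order[i]] == levels[order[i - 1]]) {
      parents[order[i]] = std::max(parents[order[i]], parents[order[i - 1]]);
    }
  }

  for (std::size_t i = 0; i < parents.size(); ++i) {
    std::printf("node %zu: level %d, parent %d\n", i, levels[i], parents[i]);
  }
  // nodes 2 and 3 inherit parent 0 from node 1; node 5 inherits parent 3 from node 4
  return 0;
}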
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/read_json.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "read_json.hpp" #include <io/comp/io_uncomp.hpp> #include <io/json/legacy/read_json.hpp> #include <io/json/nested_json.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/utilities/error.hpp> #include <rmm/exec_policy.hpp> #include <thrust/iterator/constant_iterator.h> #include <thrust/scatter.h> #include <numeric> namespace cudf::io::json::detail { size_t sources_size(host_span<std::unique_ptr<datasource>> const sources, size_t range_offset, size_t range_size) { return std::accumulate(sources.begin(), sources.end(), 0ul, [=](size_t sum, auto& source) { auto const size = source->size(); // TODO take care of 0, 0, or *, 0 case. return sum + (range_size == 0 or range_offset + range_size > size ? size - range_offset : range_size); }); } rmm::device_uvector<char> ingest_raw_input(host_span<std::unique_ptr<datasource>> sources, compression_type compression, size_t range_offset, size_t range_size, rmm::cuda_stream_view stream) { CUDF_FUNC_RANGE(); // We append a line delimiter between two files to make sure the last line of file i and the first // line of file i+1 don't end up on the same JSON line, if file i does not already end with a line // delimiter. auto constexpr num_delimiter_chars = 1; auto const num_extra_delimiters = num_delimiter_chars * (sources.size() - 1); // Iterate through the user defined sources and read the contents into the local buffer auto const total_source_size = sources_size(sources, range_offset, range_size) + num_extra_delimiters; if (compression == compression_type::NONE) { std::vector<size_type> delimiter_map{}; delimiter_map.reserve(sources.size()); auto d_buffer = rmm::device_uvector<char>(total_source_size, stream); size_t bytes_read = 0; std::vector<std::unique_ptr<datasource::buffer>> h_buffers; for (auto const& source : sources) { if (!source->is_empty()) { auto data_size = (range_size != 0) ? 
range_size : source->size(); auto destination = reinterpret_cast<uint8_t*>(d_buffer.data()) + bytes_read; if (source->is_device_read_preferred(data_size)) { bytes_read += source->device_read(range_offset, data_size, destination, stream); } else { h_buffers.emplace_back(source->host_read(range_offset, data_size)); auto const& h_buffer = h_buffers.back(); CUDF_CUDA_TRY(cudaMemcpyAsync( destination, h_buffer->data(), h_buffer->size(), cudaMemcpyDefault, stream.value())); bytes_read += h_buffer->size(); } delimiter_map.push_back(bytes_read); bytes_read += num_delimiter_chars; } } // If this is a multi-file source, we scatter the JSON line delimiters between files if (sources.size() > 1) { static_assert(num_delimiter_chars == 1, "Currently only single-character delimiters are supported"); auto const delimiter_source = thrust::make_constant_iterator('\n'); auto const d_delimiter_map = cudf::detail::make_device_uvector_async( host_span<size_type const>{delimiter_map.data(), delimiter_map.size() - 1}, stream, rmm::mr::get_current_device_resource()); thrust::scatter(rmm::exec_policy_nosync(stream), delimiter_source, delimiter_source + d_delimiter_map.size(), d_delimiter_map.data(), d_buffer.data()); } stream.synchronize(); return d_buffer; } else { auto buffer = std::vector<uint8_t>(total_source_size); // Single read because only a single compressed source is supported // Reading to host because decompression of a single block is much faster on the CPU sources[0]->host_read(range_offset, total_source_size, buffer.data()); auto const uncomp_data = decompress(compression, buffer); return cudf::detail::make_device_uvector_sync( host_span<char const>{reinterpret_cast<char const*>(uncomp_data.data()), uncomp_data.size()}, stream, rmm::mr::get_current_device_resource()); } } size_type find_first_delimiter_in_chunk(host_span<std::unique_ptr<cudf::io::datasource>> sources, json_reader_options const& reader_opts, char const delimiter, rmm::cuda_stream_view stream) { auto const buffer = ingest_raw_input(sources, reader_opts.get_compression(), reader_opts.get_byte_range_offset(), reader_opts.get_byte_range_size(), stream); return find_first_delimiter(buffer, delimiter, stream); } bool should_load_whole_source(json_reader_options const& reader_opts) { return reader_opts.get_byte_range_offset() == 0 and // reader_opts.get_byte_range_size() == 0; } /** * @brief Get the byte range between record starts and ends starting from the given range. * * if get_byte_range_offset == 0, then we can skip the first delimiter search * if get_byte_range_offset != 0, then we need to search for the first delimiter in given range. * if not found, skip this chunk, if found, then search for first delimiter in next range until we * find a delimiter. Use this as actual range for parsing. * * @param sources Data sources to read from * @param reader_opts JSON reader options with range offset and range size * @param stream CUDA stream used for device memory operations and kernel launches * @return Byte range for parsing */ auto get_record_range_raw_input(host_span<std::unique_ptr<datasource>> sources, json_reader_options const& reader_opts, rmm::cuda_stream_view stream) { auto buffer = ingest_raw_input(sources, reader_opts.get_compression(), reader_opts.get_byte_range_offset(), reader_opts.get_byte_range_size(), stream); if (should_load_whole_source(reader_opts)) return buffer; auto first_delim_pos = reader_opts.get_byte_range_offset() == 0 ? 
0 : find_first_delimiter(buffer, '\n', stream); if (first_delim_pos == -1) { return rmm::device_uvector<char>{0, stream}; } else { first_delim_pos = first_delim_pos + reader_opts.get_byte_range_offset(); // Find next delimiter decltype(first_delim_pos) next_delim_pos = -1; auto const total_source_size = sources_size(sources, 0, 0); auto current_offset = reader_opts.get_byte_range_offset() + reader_opts.get_byte_range_size(); while (current_offset < total_source_size and next_delim_pos == -1) { buffer = ingest_raw_input(sources, reader_opts.get_compression(), current_offset, reader_opts.get_byte_range_size(), stream); next_delim_pos = find_first_delimiter(buffer, '\n', stream); if (next_delim_pos == -1) { current_offset += reader_opts.get_byte_range_size(); } } if (next_delim_pos == -1) { next_delim_pos = total_source_size; } else { next_delim_pos = next_delim_pos + current_offset; } return ingest_raw_input(sources, reader_opts.get_compression(), first_delim_pos, next_delim_pos - first_delim_pos, stream); } } table_with_metadata read_json(host_span<std::unique_ptr<datasource>> sources, json_reader_options const& reader_opts, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); if (reader_opts.is_enabled_legacy()) { return legacy::read_json(sources, reader_opts, stream, mr); } if (not should_load_whole_source(reader_opts)) { CUDF_EXPECTS(reader_opts.is_enabled_lines(), "Specifying a byte range is supported only for JSON Lines"); CUDF_EXPECTS(sources.size() == 1, "Specifying a byte range is supported only for a single source"); } if (sources.size() > 1) { CUDF_EXPECTS(reader_opts.get_compression() == compression_type::NONE, "Multiple compressed inputs are not supported"); CUDF_EXPECTS(reader_opts.is_enabled_lines(), "Multiple inputs are supported only for JSON Lines format"); } auto const buffer = get_record_range_raw_input(sources, reader_opts, stream); return device_parse_nested_json(buffer, reader_opts, stream, mr); // For debug purposes, use host_parse_nested_json() } } // namespace cudf::io::json::detail
0
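One easy-to-miss detail of ingest_raw_input above: when several sources are concatenated, one byte is reserved after each source and a '\n' is later scattered into every inter-source gap, so the last record of file i can never merge with the first record of file i+1. A host model of that buffer layout follows, with illustrative values; the reader performs the final step on the GPU with thrust::scatter:

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

int main()
{
  std::vector<std::string> const sources = {"{\"a\":1}", "{\"a\":2}\n{\"a\":3}"};

  // Total size: all source bytes plus one delimiter slot between each pair of files.
  std::size_t total = sources.size() - 1;
  for (auto const& s : sources) { total += s.size(); }

  std::string buffer(total, '\0');
  std::vector<std::size_t> delimiter_map;  // byte offsets of the reserved delimiter slots
  std::size_t bytes_read = 0;
  for (auto const& s : sources) {
    buffer.replace(bytes_read, s.size(), s);
    bytes_read += s.size();
    delimiter_map.push_back(bytes_read);  // slot immediately after this source
    bytes_read += 1;                      // leave the slot empty for now
  }

  // "Scatter" a newline into every inter-source slot; the final map entry points
  // one past the buffer and is never used, mirroring the size()-1 span above.
  for (std::size_t i = 0; i + 1 < delimiter_map.size(); ++i) {
    buffer[delimiter_map[i]] = '\n';
  }

  std::cout << buffer << '\n';  // three JSON-lines records, one per line
  return 0;
}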
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/write_json.cu
/* * Copyright (c) 2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file write_json.cu * @brief cuDF-IO JSON writer implementation */ #include <io/csv/durations.hpp> #include <io/utilities/parsing_utils.cuh> #include <lists/utilities.hpp> #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/copy.hpp> #include <cudf/detail/iterator.cuh> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/io/data_sink.hpp> #include <cudf/io/detail/json.hpp> #include <cudf/lists/lists_column_view.hpp> #include <cudf/scalar/scalar.hpp> #include <cudf/strings/detail/combine.hpp> #include <cudf/strings/detail/converters.hpp> #include <cudf/strings/detail/strings_children.cuh> #include <cudf/strings/strings_column_view.hpp> #include <cudf/structs/structs_column_view.hpp> #include <cudf/table/table.hpp> #include <cudf/table/table_device_view.cuh> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <rmm/mr/device/per_device_resource.hpp> #include <thrust/for_each.h> #include <thrust/gather.h> #include <thrust/host_vector.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/zip_iterator.h> #include <thrust/scan.h> #include <thrust/tabulate.h> #include <algorithm> #include <memory> #include <string> #include <vector> namespace cudf::io::json::detail { std::unique_ptr<column> make_column_names_column(host_span<column_name_info const> column_names, size_type num_columns, rmm::cuda_stream_view stream); namespace { /** * @brief Functor to modify a string column for JSON format. * * This will convert escape characters and wrap quotes around strings. */ struct escape_strings_fn { column_device_view const d_column; bool const append_colon{false}; size_type* d_offsets{}; char* d_chars{}; __device__ void write_char(char_utf8 chr, char*& d_buffer, size_type& bytes) { if (d_buffer) d_buffer += cudf::strings::detail::from_char_utf8(chr, d_buffer); else bytes += cudf::strings::detail::bytes_in_char_utf8(chr); } __device__ inline char nibble_to_hex(uint8_t nibble) const { return nibble < 10 ? 
'0' + nibble : 'a' + nibble - 10; } __device__ void write_utf8_codepoint(uint16_t codepoint, char*& d_buffer, size_type& bytes) { if (d_buffer) { d_buffer[0] = '\\'; d_buffer[1] = 'u'; d_buffer[2] = nibble_to_hex((codepoint >> 12) & 0x0F); d_buffer[3] = nibble_to_hex((codepoint >> 8) & 0x0F); d_buffer[4] = nibble_to_hex((codepoint >> 4) & 0x0F); d_buffer[5] = nibble_to_hex((codepoint)&0x0F); d_buffer += 6; } else { bytes += 6; } } __device__ void write_utf16_codepoint(uint32_t codepoint, char*& d_buffer, size_type& bytes) { constexpr uint16_t UTF16_HIGH_SURROGATE_BEGIN = 0xD800; constexpr uint16_t UTF16_LOW_SURROGATE_BEGIN = 0xDC00; codepoint -= 0x1'0000; uint16_t hex_high = ((codepoint >> 10) & 0x3FF) + UTF16_HIGH_SURROGATE_BEGIN; uint16_t hex_low = (codepoint & 0x3FF) + UTF16_LOW_SURROGATE_BEGIN; write_utf8_codepoint(hex_high, d_buffer, bytes); write_utf8_codepoint(hex_low, d_buffer, bytes); } __device__ void operator()(size_type idx) { if (d_column.is_null(idx)) { if (!d_chars) d_offsets[idx] = 0; return; } auto const d_str = d_column.element<string_view>(idx); // entire string must be double-quoted. constexpr char_utf8 const quote = '\"'; // wrap quotes bool constexpr quote_row = true; char* d_buffer = d_chars ? d_chars + d_offsets[idx] : nullptr; size_type bytes = 0; if (quote_row) write_char(quote, d_buffer, bytes); for (auto utf8_char : d_str) { if (utf8_char > 0x0000'00FF) { // multi-byte char uint32_t codepoint = cudf::strings::detail::utf8_to_codepoint(utf8_char); if (codepoint <= 0x0000'FFFF) { // write \uXXXX utf-8 codepoint write_utf8_codepoint(codepoint, d_buffer, bytes); } else { // write \uXXXX\uXXXX utf-16 surrogate pair // codepoint > 0xFFFF && codepoint <= 0x10FFFF write_utf16_codepoint(codepoint, d_buffer, bytes); } continue; } auto escaped_chars = get_escaped_char(utf8_char); if (escaped_chars.first == '\0') { write_char(escaped_chars.second, d_buffer, bytes); } else { write_char(escaped_chars.first, d_buffer, bytes); write_char(escaped_chars.second, d_buffer, bytes); } } if (quote_row) write_char(quote, d_buffer, bytes); constexpr char_utf8 const colon = ':'; // append colon if (append_colon) write_char(colon, d_buffer, bytes); if (!d_chars) d_offsets[idx] = bytes; } std::unique_ptr<column> get_escaped_strings(column_view const& column_v, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto children = cudf::strings::detail::make_strings_children(*this, column_v.size(), stream, mr); return make_strings_column(column_v.size(), std::move(children.first), std::move(children.second), column_v.null_count(), cudf::detail::copy_bitmask(column_v, stream, mr)); } }; // Struct - scatter string_views of each element in a struct column struct struct_scatter_strings_fn { table_device_view const tbl; column_device_view const col_names; size_type const strviews_per_column; size_type const num_strviews_per_row; string_view const row_prefix; // "{" string_view const row_suffix; // "}" or "}\n" for json-lines string_view const value_separator; // "," string_view const narep; // null entry replacement bool const include_nulls; string_view* d_strviews; /** * @brief Scatters string_views for each element in a struct column * * @param idx Column-major index of the element to scatter */ __device__ void operator()(size_type idx) { auto const row = idx / tbl.num_columns(); auto const col = idx % tbl.num_columns(); auto const d_str_null = tbl.column(col).is_null(row); auto const this_index = row * num_strviews_per_row + col * strviews_per_column + 1; // prefix if (col == 0) 
d_strviews[this_index - 1] = row_prefix; if (col != 0) d_strviews[this_index - 1] = include_nulls ? value_separator : string_view{}; if (!include_nulls && d_str_null) { d_strviews[this_index] = string_view{}; d_strviews[this_index + 1] = string_view{}; } else { auto const d_col_name = col_names.element<string_view>(col); auto const d_str = d_str_null ? narep : tbl.column(col).template element<string_view>(row); // column_name: value d_strviews[this_index] = d_col_name; d_strviews[this_index + 1] = d_str; } // suffix if (col == tbl.num_columns() - 1) { d_strviews[this_index + 2] = row_suffix; } } }; struct validity_fn { table_device_view const tbl; __device__ bool operator()(size_type idx) const { auto const row = idx / tbl.num_columns(); auto const col = idx % tbl.num_columns(); return tbl.column(col).is_valid(row); } }; /** * @brief Concatenate the strings from each row of the given table as structs in JSON string * * Each row will be struct with field name as column names and values from each column in the table. * * @param strings_columns Table of strings columns * @param column_names Column of names for each column in the table * @param row_prefix Prepend this string to each row * @param row_suffix Append this string to each row * @param value_separator Separator between values * @param narep Null-String replacement * @param include_nulls Include null string entries in the output * @param stream CUDA stream used for device memory operations and kernel launches. * @param mr Device memory resource to use for device memory allocation. * @return New strings column of JSON structs in each row */ std::unique_ptr<column> struct_to_strings(table_view const& strings_columns, column_view const& column_names, string_view const row_prefix, string_view const row_suffix, string_view const value_separator, string_scalar const& narep, bool include_nulls, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); CUDF_EXPECTS(column_names.type().id() == type_id::STRING, "Column names must be of type string"); auto const num_columns = strings_columns.num_columns(); CUDF_EXPECTS(num_columns == column_names.size(), "Number of column names should be equal to number of columns in the table"); auto const strings_count = strings_columns.num_rows(); if (strings_count == 0) // empty begets empty return make_empty_column(type_id::STRING); // check all columns are of type string CUDF_EXPECTS(std::all_of(strings_columns.begin(), strings_columns.end(), [](auto const& c) { return c.type().id() == type_id::STRING; }), "All columns must be of type string"); auto constexpr strviews_per_column = 3; // (for each "column_name:", "value", "separator") auto const num_strviews_per_row = strings_columns.num_columns() * strviews_per_column + 1; // e.g. {col1: value, col2: value, col3: value} = 1 + 3 + 3 + (3-1) + 1 = 10 auto tbl_device_view = cudf::table_device_view::create(strings_columns, stream); auto d_column_names = column_device_view::create(column_names, stream); // Note for future: chunk it but maximize parallelism, if memory usage is high. 
auto const total_strings = num_strviews_per_row * strings_columns.num_rows(); auto const total_rows = strings_columns.num_rows() * strings_columns.num_columns(); rmm::device_uvector<string_view> d_strviews(total_strings, stream); struct_scatter_strings_fn scatter_fn{*tbl_device_view, *d_column_names, strviews_per_column, num_strviews_per_row, row_prefix, row_suffix, value_separator, narep.value(stream), include_nulls, d_strviews.begin()}; // scatter row_prefix, row_suffix, column_name:, value, value_separator as string_views thrust::for_each(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), thrust::make_counting_iterator<size_type>(total_rows), scatter_fn); if (!include_nulls) { // if previous column was null, then we skip the value separator rmm::device_uvector<bool> d_str_separator(total_rows, stream); auto row_num = cudf::detail::make_counting_transform_iterator( 0, [tbl = *tbl_device_view] __device__(auto idx) -> size_type { return idx / tbl.num_columns(); }); auto validity_iterator = cudf::detail::make_counting_transform_iterator(0, validity_fn{*tbl_device_view}); thrust::exclusive_scan_by_key(rmm::exec_policy(stream), row_num, row_num + total_rows, validity_iterator, d_str_separator.begin(), false, thrust::equal_to<size_type>{}, thrust::logical_or<bool>{}); thrust::for_each(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), thrust::make_counting_iterator<size_type>(total_rows), [write_separator = d_str_separator.begin(), d_strviews = d_strviews.begin(), value_separator, tbl = *tbl_device_view, strviews_per_column, num_strviews_per_row] __device__(auto idx) { auto const row = idx / tbl.num_columns(); auto const col = idx % tbl.num_columns(); auto const this_index = row * num_strviews_per_row + col * strviews_per_column + 1; if (write_separator[idx] && tbl.column(col).is_valid(row)) { d_strviews[this_index - 1] = value_separator; } }); } auto joined_col = make_strings_column(d_strviews, string_view{nullptr, 0}, stream, mr); // gather from offset and create a new string column auto old_offsets = strings_column_view(joined_col->view()).offsets(); rmm::device_uvector<size_type> row_string_offsets(strings_columns.num_rows() + 1, stream, mr); auto const d_strview_offsets = cudf::detail::make_counting_transform_iterator( 0, [num_strviews_per_row] __device__(size_type const i) { return i * num_strviews_per_row; }); thrust::gather(rmm::exec_policy(stream), d_strview_offsets, d_strview_offsets + row_string_offsets.size(), old_offsets.begin<size_type>(), row_string_offsets.begin()); return make_strings_column( strings_columns.num_rows(), std::make_unique<cudf::column>(std::move(row_string_offsets), rmm::device_buffer{}, 0), std::move(joined_col->release().children[strings_column_view::chars_column_index]), 0, {}); } /** * @brief Concatenates a list of strings columns into a single strings column. * * @param lists_strings Column containing lists of strings to concatenate. * @param list_prefix String to place before each list. (typically [) * @param list_suffix String to place after each list. (typically ]) * @param element_separator String that should inserted between strings of each list row. * @param element_narep String that should be used in place of any null strings. * @param stream CUDA stream used for device memory operations and kernel launches. * @param mr Device memory resource used to allocate the returned column's device memory. * @return New strings column with concatenated results. 
 */
std::unique_ptr<column> join_list_of_strings(lists_column_view const& lists_strings,
                                             string_view const list_prefix,
                                             string_view const list_suffix,
                                             string_view const element_separator,
                                             string_view const element_narep,
                                             rmm::cuda_stream_view stream,
                                             rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  /*
  Create string_views of the list elements, the element separators, and the list prefix/suffix,
  then concatenate them all together. Gather the offset of the first string_view of each row as
  the offsets for the output string column.
  Algorithm:
    calculate #strviews per list using null mask, and list_offsets.
    scan #strviews to get strviews_offset
    create label segments.
    sublist_index = index - offsets[label]
    strviews_offset[label] + sublist_index = string_view index
    +1, +2 use above 2 to scatter element, element_separator
    scatter list_prefix, list_suffix to the right place using list_offsets
    make_strings_column() and gather offsets, based on strviews_offset.
  */
  auto const offsets          = lists_strings.offsets();
  auto const strings_children = lists_strings.get_sliced_child(stream);
  auto const num_lists        = lists_strings.size();
  auto const num_strings      = strings_children.size();
  auto const num_offsets      = offsets.size();

  rmm::device_uvector<size_type> d_strview_offsets(num_offsets, stream);
  auto num_strings_per_list = cudf::detail::make_counting_transform_iterator(
    0, [offsets = offsets.begin<size_type>(), num_offsets] __device__(size_type idx) {
      if (idx + 1 >= num_offsets) return 0;
      auto const length = offsets[idx + 1] - offsets[idx];
      return length == 0 ? 2 : (2 + length + length - 1);
    });
  thrust::exclusive_scan(rmm::exec_policy(stream),
                         num_strings_per_list,
                         num_strings_per_list + num_offsets,
                         d_strview_offsets.begin());
  auto const total_strings = d_strview_offsets.back_element(stream);

  rmm::device_uvector<string_view> d_strviews(total_strings, stream);
  // scatter null_list and list_prefix, list_suffix
  auto col_device_view = cudf::column_device_view::create(lists_strings.parent(), stream);
  thrust::for_each(rmm::exec_policy(stream),
                   thrust::make_counting_iterator<size_type>(0),
                   thrust::make_counting_iterator<size_type>(num_lists),
                   [col = *col_device_view,
                    list_prefix,
                    list_suffix,
                    d_strview_offsets = d_strview_offsets.begin(),
                    d_strviews        = d_strviews.begin()] __device__(auto idx) {
                     if (col.is_null(idx)) {
                       d_strviews[d_strview_offsets[idx]]     = string_view{};
                       d_strviews[d_strview_offsets[idx] + 1] = string_view{};
                     } else {
                       // [ ]
                       d_strviews[d_strview_offsets[idx]]         = list_prefix;
                       d_strviews[d_strview_offsets[idx + 1] - 1] = list_suffix;
                     }
                   });

  // scatter string and separator
  auto labels = cudf::lists::detail::generate_labels(
    lists_strings, num_strings, stream, rmm::mr::get_current_device_resource());
  auto d_strings_children = cudf::column_device_view::create(strings_children, stream);
  thrust::for_each(rmm::exec_policy(stream),
                   thrust::make_counting_iterator<size_type>(0),
                   thrust::make_counting_iterator<size_type>(num_strings),
                   [col                = *col_device_view,
                    d_strview_offsets  = d_strview_offsets.begin(),
                    d_strviews         = d_strviews.begin(),
                    labels             = labels->view().begin<size_type>(),
                    list_offsets       = offsets.begin<size_type>(),
                    d_strings_children = *d_strings_children,
                    element_separator,
                    element_narep] __device__(auto idx) {
                     auto const label         = labels[idx];
                     auto const sublist_index = idx - list_offsets[label];
                     auto const strview_index = d_strview_offsets[label] + sublist_index * 2 + 1;
                     // value or na_rep
                     auto const strview = d_strings_children.element<cudf::string_view>(idx);
                     d_strviews[strview_index] = d_strings_children.is_null(idx) ?
element_narep : strview;
                     // separator
                     if (sublist_index != 0) { d_strviews[strview_index - 1] = element_separator; }
                   });

  auto joined_col = make_strings_column(d_strviews, string_view{nullptr, 0}, stream, mr);

  // gather from offset and create a new string column
  auto old_offsets = strings_column_view(joined_col->view()).offsets();
  rmm::device_uvector<size_type> row_string_offsets(num_offsets, stream, mr);
  thrust::gather(rmm::exec_policy(stream),
                 d_strview_offsets.begin(),
                 d_strview_offsets.end(),
                 old_offsets.begin<size_type>(),
                 row_string_offsets.begin());
  return make_strings_column(
    num_lists,
    std::make_unique<cudf::column>(std::move(row_string_offsets), rmm::device_buffer{}, 0),
    std::move(joined_col->release().children[strings_column_view::chars_column_index]),
    lists_strings.null_count(),
    cudf::detail::copy_bitmask(lists_strings.parent(), stream, mr));
}

/**
 * @brief Functor to convert a column to its string representation for the JSON format.
 */
struct column_to_strings_fn {
  /**
   * @brief Returns true if the specified type is not supported by the JSON writer.
   */
  template <typename column_type>
  constexpr static bool is_not_handled()
  {
    // Note: the case (not std::is_same_v<column_type, bool>) is already covered by is_integral
    return not((std::is_same_v<column_type, cudf::string_view>) ||
               (std::is_integral_v<column_type>) || (std::is_floating_point_v<column_type>) ||
               (cudf::is_fixed_point<column_type>()) || (cudf::is_timestamp<column_type>()) ||
               (cudf::is_duration<column_type>()));
  }

  explicit column_to_strings_fn(json_writer_options const& options,
                                rmm::cuda_stream_view stream,
                                rmm::mr::device_memory_resource* mr)
    : options_(options),
      stream_(stream),
      mr_(mr),
      narep(options.get_na_rep(), true, stream),
      struct_value_separator(",", true, stream),
      struct_row_begin_wrap("{", true, stream),
      struct_row_end_wrap("}", true, stream),
      list_value_separator(",", true, stream),
      list_row_begin_wrap("[", true, stream),
      list_row_end_wrap("]", true, stream),
      true_value(options_.get_true_value(), true, stream),
      false_value(options_.get_false_value(), true, stream)
  {
  }

  ~column_to_strings_fn()                                      = default;
  column_to_strings_fn(column_to_strings_fn const&)            = delete;
  column_to_strings_fn& operator=(column_to_strings_fn const&) = delete;
  column_to_strings_fn(column_to_strings_fn&&)                 = delete;
  column_to_strings_fn& operator=(column_to_strings_fn&&)      = delete;

  // unsupported type of column:
  template <typename column_type>
  std::enable_if_t<is_not_handled<column_type>(), std::unique_ptr<column>> operator()(
    column_view const&) const
  {
    CUDF_FAIL("Unsupported column type.");
  }

  // Note: `null` replacement with `na_rep` is deferred to `concatenate()`
  // instead of being done column-wise; this might be faster.
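  // Each typed operator() below converts one column to its JSON string form. The overloads
  // are selected via cudf::type_dispatcher, e.g. (sketch, mirroring the table path below):
  //   cudf::type_dispatcher<cudf::id_to_type_impl, column_to_strings_fn const&>(
  //     col.type(), *this, col);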
// bools: template <typename column_type> std::enable_if_t<std::is_same_v<column_type, bool>, std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::strings::detail::from_booleans(column, true_value, false_value, stream_, mr_); } // strings: template <typename column_type> std::enable_if_t<std::is_same_v<column_type, cudf::string_view>, std::unique_ptr<column>> operator()(column_view const& column_v) const { auto d_column = column_device_view::create(column_v, stream_); return escape_strings_fn{*d_column}.get_escaped_strings(column_v, stream_, mr_); } // ints: template <typename column_type> std::enable_if_t<std::is_integral_v<column_type> && !std::is_same_v<column_type, bool>, std::unique_ptr<column>> operator()(column_view const& column) const { return cudf::strings::detail::from_integers(column, stream_, mr_); } // floats: template <typename column_type> std::enable_if_t<std::is_floating_point_v<column_type>, std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::strings::detail::from_floats(column, stream_, mr_); } // fixed point: template <typename column_type> std::enable_if_t<cudf::is_fixed_point<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::strings::detail::from_fixed_point(column, stream_, mr_); } // timestamps: template <typename column_type> std::enable_if_t<cudf::is_timestamp<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { std::string format = [&]() { if (std::is_same_v<cudf::timestamp_s, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%SZ"}; } else if (std::is_same_v<cudf::timestamp_ms, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%3fZ"}; } else if (std::is_same_v<cudf::timestamp_us, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%6fZ"}; } else if (std::is_same_v<cudf::timestamp_ns, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%9fZ"}; } else { return std::string{"%Y-%m-%d"}; } }(); // Since format uses ":", we need to add quotes to the format format = "\"" + format + "\""; return cudf::strings::detail::from_timestamps( column, format, strings_column_view(make_empty_column(type_id::STRING)->view()), stream_, mr_); } template <typename column_type> std::enable_if_t<cudf::is_duration<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { auto duration_string = cudf::io::detail::csv::pandas_format_durations(column, stream_, mr_); auto quotes = make_column_from_scalar(string_scalar{"\"", true, stream_}, column.size(), stream_, mr_); return cudf::strings::detail::concatenate( table_view{{quotes->view(), duration_string->view(), quotes->view()}}, string_scalar("", true, stream_), string_scalar("", false, stream_), strings::separator_on_nulls::YES, stream_, mr_); } // lists: template <typename column_type> std::enable_if_t<std::is_same_v<column_type, cudf::list_view>, std::unique_ptr<column>> operator()(column_view const& column, host_span<column_name_info const> children_names) const { auto child_view = lists_column_view(column).get_sliced_child(stream_); auto constexpr child_index = lists_column_view::child_column_index; auto child_string_with_null = [&]() { if (child_view.type().id() == type_id::STRUCT) { return this->template operator()<cudf::struct_view>(child_view, children_names.size() > child_index ? 
children_names[child_index].children : std::vector<column_name_info>{}); } else if (child_view.type().id() == type_id::LIST) { return this->template operator()<cudf::list_view>(child_view, children_names.size() > child_index ? children_names[child_index].children : std::vector<column_name_info>{}); } else { return cudf::type_dispatcher<cudf::id_to_type_impl, column_to_strings_fn const&>( child_view.type(), *this, child_view); } }; auto new_offsets = cudf::lists::detail::get_normalized_offsets( lists_column_view(column), stream_, rmm::mr::get_current_device_resource()); auto const list_child_string = make_lists_column( column.size(), std::move(new_offsets), std::move(child_string_with_null()), column.null_count(), cudf::detail::copy_bitmask(column, stream_, rmm::mr::get_current_device_resource()), stream_); return join_list_of_strings(lists_column_view(*list_child_string), list_row_begin_wrap.value(stream_), list_row_end_wrap.value(stream_), list_value_separator.value(stream_), narep.value(stream_), stream_, mr_); } // structs: template <typename column_type> std::enable_if_t<std::is_same_v<column_type, cudf::struct_view>, std::unique_ptr<column>> operator()(column_view const& column, host_span<column_name_info const> children_names) const { auto const child_it = cudf::detail::make_counting_transform_iterator( 0, [&stream = stream_, structs_view = structs_column_view{column}](auto const child_idx) { return structs_view.get_sliced_child(child_idx, stream); }); auto col_string = operator()(child_it, child_it + column.num_children(), children_names, struct_row_end_wrap.value(stream_)); col_string->set_null_mask(cudf::detail::copy_bitmask(column, stream_, mr_), column.null_count()); return col_string; } // Table: template <typename column_iterator> std::unique_ptr<column> operator()(column_iterator column_begin, column_iterator column_end, host_span<column_name_info const> children_names, cudf::string_view const row_end_wrap_value) const { auto const num_columns = std::distance(column_begin, column_end); auto column_names = make_column_names_column(children_names, num_columns, stream_); auto column_names_view = column_names->view(); std::vector<std::unique_ptr<cudf::column>> str_column_vec; // populate vector of string-converted columns: // auto i_col_begin = thrust::make_zip_iterator(thrust::counting_iterator<size_t>(0), column_begin); std::transform( i_col_begin, i_col_begin + num_columns, std::back_inserter(str_column_vec), [this, &children_names](auto const& i_current_col) { auto const i = thrust::get<0>(i_current_col); auto const& current_col = thrust::get<1>(i_current_col); // Struct needs children's column names if (current_col.type().id() == type_id::STRUCT) { return this->template operator()<cudf::struct_view>(current_col, children_names.size() > i ? children_names[i].children : std::vector<column_name_info>{}); } else if (current_col.type().id() == type_id::LIST) { return this->template operator()<cudf::list_view>(current_col, children_names.size() > i ? 
children_names[i].children : std::vector<column_name_info>{}); } else { return cudf::type_dispatcher<cudf::id_to_type_impl, column_to_strings_fn const&>( current_col.type(), *this, current_col); } }); // create string table view from str_column_vec: // auto str_table_ptr = std::make_unique<cudf::table>(std::move(str_column_vec)); auto str_table_view = str_table_ptr->view(); // concatenate columns in each row into one big string column // (using null representation and delimiter): // return struct_to_strings(str_table_view, column_names_view, struct_row_begin_wrap.value(stream_), row_end_wrap_value, struct_value_separator.value(stream_), narep, options_.is_enabled_include_nulls(), stream_, rmm::mr::get_current_device_resource()); } private: json_writer_options const& options_; rmm::cuda_stream_view stream_; rmm::mr::device_memory_resource* mr_; string_scalar const narep; // "null" // struct convert constants string_scalar const struct_value_separator; // "," string_scalar const struct_row_begin_wrap; // "{" string_scalar const struct_row_end_wrap; // "}" // list converter constants string_scalar const list_value_separator; // "," string_scalar const list_row_begin_wrap; // "[" string_scalar const list_row_end_wrap; // "]" // bool converter constants string_scalar const true_value; string_scalar const false_value; }; } // namespace std::unique_ptr<column> make_strings_column_from_host(host_span<std::string const> host_strings, rmm::cuda_stream_view stream) { std::string const host_chars = std::accumulate(host_strings.begin(), host_strings.end(), std::string("")); auto d_chars = cudf::detail::make_device_uvector_async( host_chars, stream, rmm::mr::get_current_device_resource()); std::vector<cudf::size_type> offsets(host_strings.size() + 1, 0); std::transform_inclusive_scan(host_strings.begin(), host_strings.end(), offsets.begin() + 1, std::plus<cudf::size_type>{}, [](auto& str) { return str.size(); }); auto d_offsets = cudf::detail::make_device_uvector_sync(offsets, stream, rmm::mr::get_current_device_resource()); return cudf::make_strings_column( host_strings.size(), std::move(d_offsets), std::move(d_chars), {}, 0); } std::unique_ptr<column> make_column_names_column(host_span<column_name_info const> column_names, size_type num_columns, rmm::cuda_stream_view stream) { std::vector<std::string> unescaped_column_names; if (column_names.empty()) { std::generate_n(std::back_inserter(unescaped_column_names), num_columns, [v = 0]() mutable { return std::to_string(v++); }); } else { std::transform(column_names.begin(), column_names.end(), std::back_inserter(unescaped_column_names), [](column_name_info const& name_info) { return name_info.name; }); } auto unescaped_string_col = make_strings_column_from_host(unescaped_column_names, stream); auto d_column = column_device_view::create(*unescaped_string_col, stream); return escape_strings_fn{*d_column, true}.get_escaped_strings( *unescaped_string_col, stream, rmm::mr::get_current_device_resource()); } void write_chunked(data_sink* out_sink, strings_column_view const& str_column_view, int const skip_last_chars, json_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); CUDF_EXPECTS(str_column_view.size() > 0, "Unexpected empty strings column."); auto const total_num_bytes = str_column_view.chars_size() - skip_last_chars; char const* ptr_all_bytes = str_column_view.chars_begin(); if (out_sink->is_device_write_preferred(total_num_bytes)) { // Direct write from device memory 
out_sink->device_write(ptr_all_bytes, total_num_bytes, stream); } else { // copy the bytes to host to write them out auto const h_bytes = cudf::detail::make_host_vector_sync( device_span<char const>(ptr_all_bytes, total_num_bytes), stream); out_sink->host_write(h_bytes.data(), total_num_bytes); } } void write_json(data_sink* out_sink, table_view const& table, json_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); std::vector<column_name_info> user_column_names = [&]() { auto const& metadata = options.get_metadata(); if (metadata.has_value() and not metadata->schema_info.empty()) { return metadata->schema_info; } else { std::vector<column_name_info> names; // generate strings 0 to table.num_columns() std::transform(thrust::make_counting_iterator(0), thrust::make_counting_iterator(table.num_columns()), std::back_inserter(names), [](auto i) { return column_name_info{std::to_string(i)}; }); return names; } }(); auto const line_terminator = std::string(options.is_enabled_lines() ? "\n" : ","); string_scalar const d_line_terminator_with_row_end{"}" + line_terminator, true, stream}; string_scalar const d_line_terminator{line_terminator, true, stream}; // write header: required for non-record oriented output // header varies depending on orient. // write_chunked_begin(out_sink, table, user_column_names, options, stream, mr); // TODO This should go into the write_chunked_begin function std::string const list_braces{"[]"}; string_scalar const d_list_braces{list_braces, true, stream}; if (!options.is_enabled_lines()) { if (out_sink->is_device_write_preferred(1)) { out_sink->device_write(d_list_braces.data(), 1, stream); } else { out_sink->host_write(list_braces.data(), 1); } } if (table.num_rows() > 0) { auto n_rows_per_chunk = options.get_rows_per_chunk(); // This outputs the JSON in row chunks to save memory. // Maybe we can use the total_rows*count calculation and a memory threshold // instead of an arbitrary chunk count. // The entire JSON chunk must fit in CPU memory before writing it out. // if (n_rows_per_chunk % 8) // must be divisible by 8 n_rows_per_chunk += 8 - (n_rows_per_chunk % 8); CUDF_EXPECTS(n_rows_per_chunk >= 8, "write_json: invalid chunk_rows; must be at least 8"); auto num_rows = table.num_rows(); std::vector<table_view> vector_views; if (num_rows <= n_rows_per_chunk) { vector_views.push_back(table); } else { auto const n_chunks = num_rows / n_rows_per_chunk; std::vector<size_type> splits(n_chunks); thrust::tabulate(splits.begin(), splits.end(), [n_rows_per_chunk](auto idx) { return (idx + 1) * n_rows_per_chunk; }); // split table_view into chunks: vector_views = cudf::detail::split(table, splits, stream); } // convert each chunk to JSON: column_to_strings_fn converter{options, stream, rmm::mr::get_current_device_resource()}; for (auto&& sub_view : vector_views) { // Skip if the table has no rows if (sub_view.num_rows() == 0) continue; std::vector<std::unique_ptr<column>> str_column_vec; // struct converter for the table auto str_concat_col = converter(sub_view.begin(), sub_view.end(), user_column_names, d_line_terminator_with_row_end.value(stream)); // Needs line_terminator at the end, to separate from next chunk bool const include_line_terminator = (&sub_view != &vector_views.back()) or options.is_enabled_lines(); auto const skip_last_chars = (include_line_terminator ? 
0 : line_terminator.size()); write_chunked(out_sink, str_concat_col->view(), skip_last_chars, options, stream, mr); } } else { if (options.is_enabled_lines()) { if (out_sink->is_device_write_preferred(1)) { out_sink->device_write(d_line_terminator.data(), d_line_terminator.size(), stream); } else { out_sink->host_write(line_terminator.data(), line_terminator.size()); } } } // TODO write_chunked_end(out_sink, options, stream, mr); if (!options.is_enabled_lines()) { if (out_sink->is_device_write_preferred(1)) { out_sink->device_write(d_list_braces.data() + 1, 1, stream); } else { out_sink->host_write(list_braces.data() + 1, 1); } } } } // namespace cudf::io::json::detail
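// Usage sketch (hypothetical example; exercises this writer through the public
// cudf::io::write_json API rather than calling the detail functions directly):
//   cudf::table tbl = ...;  // hypothetical input table
//   auto opts = cudf::io::json_writer_options::builder(
//                 cudf::io::sink_info{"out.jsonl"}, tbl.view())
//                 .lines(true)
//                 .na_rep("null")
//                 .build();
//   cudf::io::write_json(opts);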
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/nested_json.hpp
/*
 * Copyright (c) 2022-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cudf/io/detail/tokenize_json.hpp>
#include <cudf/io/types.hpp>
#include <cudf/types.hpp>
#include <cudf/utilities/bit.hpp>
#include <cudf/utilities/error.hpp>

#include <map>
#include <vector>

namespace cudf::io::json {

/**
 * @brief Struct that encapsulates all information of a columnar tree representation.
 */
struct tree_meta_t {
  rmm::device_uvector<NodeT> node_categories;
  rmm::device_uvector<NodeIndexT> parent_node_ids;
  rmm::device_uvector<TreeDepthT> node_levels;
  rmm::device_uvector<SymbolOffsetT> node_range_begin;
  rmm::device_uvector<SymbolOffsetT> node_range_end;
};

/**
 * @brief A column type
 */
enum class json_col_t : char { ListColumn, StructColumn, StringColumn, Unknown };

/**
 * @brief Enum class to specify whether we just push onto and pop from the stack or whether we also
 * reset to an empty stack on a newline character.
 */
enum class stack_behavior_t : char {
  /// Opening brackets and braces, [, {, push onto the stack, closing brackets and braces, ], }, pop
  /// from the stack
  PushPopWithoutReset,
  /// Opening brackets and braces, [, {, push onto the stack, closing brackets and braces, ], }, pop
  /// from the stack. Newline characters are considered delimiters and therefore reset to an empty
  /// stack.
  ResetOnDelimiter
};

// Default name for a list's child column
constexpr auto list_child_name{"element"};

/**
 * @brief Intermediate representation of data from a nested JSON input
 */
struct json_column {
  // Type used to count number of rows
  using row_offset_t = size_type;

  // The inferred type of this column (list, struct, or value/string column)
  json_col_t type = json_col_t::Unknown;

  std::vector<row_offset_t> string_offsets;
  std::vector<row_offset_t> string_lengths;

  // Row offsets
  std::vector<row_offset_t> child_offsets;

  // Validity bitmap
  std::vector<bitmask_type> validity;
  row_offset_t valid_count = 0;

  // Map of child columns, if applicable.
  // Using "element" (list_child_name) as the default child column's name of a list column
  // Using the struct's field names
  std::map<std::string, json_column> child_columns;
  std::vector<std::string> column_order;

  // Counting the current number of items in this column
  row_offset_t current_offset = 0;

  json_column()                              = default;
  json_column(json_column&& other)           = default;
  json_column& operator=(json_column&&)      = default;
  json_column(json_column const&)            = delete;
  json_column& operator=(json_column const&) = delete;

  /**
   * @brief Fills the rows up to the given \p up_to_row_offset with nulls.
   *
   * @param up_to_row_offset The row offset up to which to fill with nulls.
   */
  void null_fill(row_offset_t up_to_row_offset);

  /**
   * @brief Recursively iterates through the tree of columns making sure that all child columns of a
   * struct column have the same row count, filling missing rows with nulls.
   *
   * @param min_row_count The minimum number of rows to be filled.
   */
  void level_child_cols_recursively(row_offset_t min_row_count);

  /**
   * @brief Appends the row at the given index to the column, filling all rows between the column's
   * current offset and the given \p row_index with null items.
   *
   * @param row_index The row index at which to insert the given row
   * @param row_type The row's type
   * @param string_offset The string offset within the original JSON input of this item
   * @param string_end The one-past-the-last-char offset within the original JSON input of this item
   * @param child_count In case of a list column, this row's number of children is used to compute
   * the offsets
   */
  void append_row(uint32_t row_index,
                  json_col_t row_type,
                  uint32_t string_offset,
                  uint32_t string_end,
                  uint32_t child_count);
};

/**
 * @brief Intermediate representation of data from a nested JSON input, in device memory.
 * Device memory equivalent of `json_column`.
 */
struct device_json_column {
  // Type used to count number of rows
  using row_offset_t = size_type;

  // The inferred type of this column (list, struct, or value/string column)
  json_col_t type = json_col_t::Unknown;

  rmm::device_uvector<row_offset_t> string_offsets;
  rmm::device_uvector<row_offset_t> string_lengths;

  // Row offsets
  rmm::device_uvector<row_offset_t> child_offsets;

  // Validity bitmap
  rmm::device_buffer validity;

  // Map of child columns, if applicable.
  // Following "element" as the default child column's name of a list column
  // Using the struct's field names
  std::map<std::string, device_json_column> child_columns;
  std::vector<std::string> column_order;

  // Counting the current number of items in this column
  row_offset_t num_rows = 0;

  /**
   * @brief Construct a new device_json_column object
   *
   * @note `mr` is used for allocating the device memory for child_offsets and validity,
   * since they will be moved into a cudf::column later.
   *
   * @param stream The CUDA stream to which kernels are dispatched
   * @param mr Optional, resource with which to allocate
   */
  device_json_column(rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr)
    : string_offsets(0, stream),
      string_lengths(0, stream),
      child_offsets(0, stream, mr),
      validity(0, stream, mr)
  {
  }
};

namespace detail {

// TODO: return device_uvector instead of passing pre-allocated memory

/**
 * @brief Identifies the stack context for each character from a JSON input. Specifically, we
 * identify brackets and braces outside of quoted fields (e.g., field names, strings).
 * At this stage, we do not perform bracket matching, i.e., we do not verify whether a closing
 * bracket would actually pop the corresponding opening brace.
 *
 * @param[in] json_in The string of input characters
 * @param[out] d_top_of_stack Will be populated with what-is-on-top-of-the-stack for any given input
 * character of \p d_json_in, where a '{' represents that the corresponding input character is
 * within the context of a struct, a '[' represents that it is within the context of an array, and a
 * '_' symbol indicates that it is at the root of the JSON.
 * @param[in] stack_behavior Specifies the stack's behavior
 * @param[in] stream The cuda stream to dispatch GPU kernels to
 */
void get_stack_context(device_span<SymbolT const> json_in,
                       SymbolT* d_top_of_stack,
                       stack_behavior_t stack_behavior,
                       rmm::cuda_stream_view stream);

/**
 * @brief Post-processes a token stream that may contain tokens from invalid lines. Expects that the
 * token stream begins with a LineEnd token.
 *
 * @param tokens The tokens to be post-processed
 * @param token_indices The tokens' corresponding indices that are post-processed
 * @param stream The cuda stream to dispatch GPU kernels to
 * @return The post-processed token stream
 */
std::pair<rmm::device_uvector<PdaTokenT>, rmm::device_uvector<SymbolOffsetT>> process_token_stream(
  device_span<PdaTokenT const> tokens,
  device_span<SymbolOffsetT const> token_indices,
  rmm::cuda_stream_view stream);

/**
 * @brief Parses the given JSON string and generates a tree representation of the given input.
 *
 * @param tokens Vector of token types in the json string
 * @param token_indices The indices within the input string corresponding to each token
 * @param stream The CUDA stream to which kernels are dispatched
 * @param mr Optional, resource with which to allocate
 * @return A tree representation of the input JSON string as vectors of node type, parent index,
 * level, begin index, and end index in the input JSON string
 */
tree_meta_t get_tree_representation(device_span<PdaTokenT const> tokens,
                                    device_span<SymbolOffsetT const> token_indices,
                                    rmm::cuda_stream_view stream,
                                    rmm::mr::device_memory_resource* mr);

/**
 * @brief Traverses the tree representation of the JSON input in records orient format and
 * populates the output column indices and row offsets within each column.
 *
 * @param d_input The JSON input
 * @param d_tree A tree representation of the input JSON string as vectors of node type, parent
 * index, level, begin index, and end index in the input JSON string
 * @param is_array_of_arrays Whether the tree is an array of arrays
 * @param is_enabled_lines Whether the input is a line-delimited JSON
 * @param stream The CUDA stream to which kernels are dispatched
 * @param mr Optional, resource with which to allocate
 * @return A tuple of the output column indices and the row offsets within each column for each node
 */
std::tuple<rmm::device_uvector<NodeIndexT>, rmm::device_uvector<size_type>>
records_orient_tree_traversal(device_span<SymbolT const> d_input,
                              tree_meta_t const& d_tree,
                              bool is_array_of_arrays,
                              bool is_enabled_lines,
                              rmm::cuda_stream_view stream,
                              rmm::mr::device_memory_resource* mr);

/**
 * @brief Searches for and selects nodes at level `row_array_children_level`. For each selected
 * node, the function outputs the original index of that node (i.e., the node's index within
 * `node_levels`) and also generates the child index of that node relative to other children of the
 * same parent. E.g., for `[["a", "b", "c"], ["d", "e"]]`, the child indices of the string nodes
 * relative to their respective list parents are: `"a": 0, "b": 1, "c": 2, "d": 0, "e": 1`.
 *
 * @param row_array_children_level Level of the nodes to search for
 * @param node_levels Levels of each node in the tree
 * @param parent_node_ids Parent node ids of each node in the tree
 * @param stream The CUDA stream to which kernels are dispatched
 * @return A pair of device_uvector containing the original node indices and their corresponding
 * child index
 */
std::pair<rmm::device_uvector<NodeIndexT>, rmm::device_uvector<NodeIndexT>>
get_array_children_indices(TreeDepthT row_array_children_level,
                           device_span<TreeDepthT const> node_levels,
                           device_span<NodeIndexT const> parent_node_ids,
                           rmm::cuda_stream_view stream);

/**
 * @brief Reduces the node tree into a column tree by aggregating each property of each column.
* * @param tree json node tree to reduce (modified in-place, but restored to original state) * @param col_ids column ids of each node (modified in-place, but restored to original state) * @param row_offsets row offsets of each node (modified in-place, but restored to original state) * @param stream The CUDA stream to which kernels are dispatched * @return A tuple containing the column tree, identifier for each column and the maximum row index * in each column */ std::tuple<tree_meta_t, rmm::device_uvector<NodeIndexT>, rmm::device_uvector<size_type>> reduce_to_column_tree(tree_meta_t& tree, device_span<NodeIndexT> col_ids, device_span<size_type> row_offsets, rmm::cuda_stream_view stream); /** @copydoc host_parse_nested_json * All processing is done in device memory. * */ table_with_metadata device_parse_nested_json(device_span<SymbolT const> input, cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); /** * @brief Parses the given JSON string and generates table from the given input. * * @param input The JSON input * @param options Parsing options specifying the parsing behaviour * @param stream The CUDA stream to which kernels are dispatched * @param mr Optional, resource with which to allocate * @return The data parsed from the given JSON input */ table_with_metadata host_parse_nested_json(device_span<SymbolT const> input, cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); } // namespace detail } // namespace cudf::io::json
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/json/json_column.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nested_json.hpp" #include <io/utilities/parsing_utils.cuh> #include <io/utilities/string_parsing.hpp> #include <cudf/column/column_factories.hpp> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/detail/utilities/visitor_overload.hpp> #include <cudf/strings/strings_column_view.hpp> #include <cudf/types.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/count.h> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/gather.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/discard_iterator.h> #include <thrust/iterator/permutation_iterator.h> #include <thrust/iterator/zip_iterator.h> #include <thrust/reduce.h> #include <thrust/scan.h> #include <thrust/sort.h> #include <thrust/transform.h> #include <thrust/unique.h> #include <cuda/atomic> #include <algorithm> #include <cstdint> namespace cudf::io::json::detail { // DEBUG prints auto to_cat = [](auto v) -> std::string { switch (v) { case NC_STRUCT: return " S"; case NC_LIST: return " L"; case NC_STR: return " \""; case NC_VAL: return " V"; case NC_FN: return " F"; case NC_ERR: return "ER"; default: return "UN"; }; }; auto to_int = [](auto v) { return std::to_string(static_cast<int>(v)); }; auto print_vec = [](auto const& cpu, auto const name, auto converter) { for (auto const& v : cpu) printf("%3s,", converter(v).c_str()); std::cout << name << std::endl; }; void print_tree(host_span<SymbolT const> input, tree_meta_t const& d_gpu_tree, rmm::cuda_stream_view stream) { print_vec(cudf::detail::make_std_vector_async(d_gpu_tree.node_categories, stream), "node_categories", to_cat); print_vec(cudf::detail::make_std_vector_async(d_gpu_tree.parent_node_ids, stream), "parent_node_ids", to_int); print_vec( cudf::detail::make_std_vector_async(d_gpu_tree.node_levels, stream), "node_levels", to_int); auto node_range_begin = cudf::detail::make_std_vector_async(d_gpu_tree.node_range_begin, stream); auto node_range_end = cudf::detail::make_std_vector_async(d_gpu_tree.node_range_end, stream); print_vec(node_range_begin, "node_range_begin", to_int); print_vec(node_range_end, "node_range_end", to_int); for (int i = 0; i < int(node_range_begin.size()); i++) { printf("%3s ", std::string(input.data() + node_range_begin[i], node_range_end[i] - node_range_begin[i]) .c_str()); } printf(" (JSON)\n"); } /** * @brief Reduces node tree representation to column tree representation. 
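 *
 * Illustrative example (not from the original source): for the input `{"a": [1, 2], "b": "x"}`
 * the node tree holds one node per struct/list/field-name/value occurrence, while the column
 * tree produced here keeps a single entry per distinct column id, each aggregating that
 * column's node category, parent column id, node range, and maximum row offset.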
 *
 * @param tree Node tree representation of JSON string
 * @param original_col_ids Column ids of nodes
 * @param sorted_col_ids Sorted column ids of nodes
 * @param ordered_node_ids Node ids of nodes sorted by column ids
 * @param row_offsets Row offsets of nodes
 * @param is_array_of_arrays Whether the tree is an array of arrays
 * @param row_array_parent_col_id Column id of row array, if is_array_of_arrays is true
 * @param stream CUDA stream used for device memory operations and kernel launches
 * @return A tuple of the column tree representation of the JSON string, column ids of columns,
 * and max row offsets of columns
 */
std::tuple<tree_meta_t, rmm::device_uvector<NodeIndexT>, rmm::device_uvector<size_type>>
reduce_to_column_tree(tree_meta_t& tree,
                      device_span<NodeIndexT> original_col_ids,
                      device_span<NodeIndexT> sorted_col_ids,
                      device_span<NodeIndexT> ordered_node_ids,
                      device_span<size_type> row_offsets,
                      bool is_array_of_arrays,
                      NodeIndexT const row_array_parent_col_id,
                      rmm::cuda_stream_view stream)
{
  CUDF_FUNC_RANGE();
  // 1. column count for allocation
  auto const num_columns =
    thrust::unique_count(rmm::exec_policy(stream), sorted_col_ids.begin(), sorted_col_ids.end());

  // 2. reduce_by_key {col_id}, {row_offset}, max.
  rmm::device_uvector<NodeIndexT> unique_col_ids(num_columns, stream);
  rmm::device_uvector<size_type> max_row_offsets(num_columns, stream);
  auto ordered_row_offsets =
    thrust::make_permutation_iterator(row_offsets.begin(), ordered_node_ids.begin());
  thrust::reduce_by_key(rmm::exec_policy(stream),
                        sorted_col_ids.begin(),
                        sorted_col_ids.end(),
                        ordered_row_offsets,
                        unique_col_ids.begin(),
                        max_row_offsets.begin(),
                        thrust::equal_to<size_type>(),
                        thrust::maximum<size_type>());

  // 3. reduce_by_key {col_id}, {node_categories} - custom op (*+v=*, v+v=v, *+#=E)
  rmm::device_uvector<NodeT> column_categories(num_columns, stream);
  thrust::reduce_by_key(
    rmm::exec_policy(stream),
    sorted_col_ids.begin(),
    sorted_col_ids.end(),
    thrust::make_permutation_iterator(tree.node_categories.begin(), ordered_node_ids.begin()),
    unique_col_ids.begin(),
    column_categories.begin(),
    thrust::equal_to<size_type>(),
    [] __device__(NodeT type_a, NodeT type_b) -> NodeT {
      auto is_a_leaf = (type_a == NC_VAL || type_a == NC_STR);
      auto is_b_leaf = (type_b == NC_VAL || type_b == NC_STR);
      // (v+v=v, *+*=*, *+v=*, *+#=E, NESTED+VAL=NESTED)
      // *+*=*, v+v=v
      if (type_a == type_b) {
        return type_a;
      } else if (is_a_leaf) {
        // *+v=*, N+V=N
        // STRUCT/LIST + STR/VAL = STRUCT/LIST, STR/VAL + FN = ERR, STR/VAL + STR = STR
        return type_b == NC_FN ? NC_ERR : (is_b_leaf ? NC_STR : type_b);
      } else if (is_b_leaf) {
        return type_a == NC_FN ? NC_ERR : (is_a_leaf ? NC_STR : type_a);
      }
      // *+#=E
      return NC_ERR;
    });

  // 4. unique_copy parent_node_ids, ranges
  rmm::device_uvector<TreeDepthT> column_levels(0, stream);  // not required
  rmm::device_uvector<NodeIndexT> parent_col_ids(num_columns, stream);
  rmm::device_uvector<SymbolOffsetT> col_range_begin(num_columns, stream);  // Field names
  rmm::device_uvector<SymbolOffsetT> col_range_end(num_columns, stream);
  rmm::device_uvector<size_type> unique_node_ids(num_columns, stream);
  thrust::unique_by_key_copy(rmm::exec_policy(stream),
                             sorted_col_ids.begin(),
                             sorted_col_ids.end(),
                             ordered_node_ids.begin(),
                             thrust::make_discard_iterator(),
                             unique_node_ids.begin());
  thrust::copy_n(
    rmm::exec_policy(stream),
    thrust::make_zip_iterator(
      thrust::make_permutation_iterator(tree.parent_node_ids.begin(), unique_node_ids.begin()),
      thrust::make_permutation_iterator(tree.node_range_begin.begin(), unique_node_ids.begin()),
      thrust::make_permutation_iterator(tree.node_range_end.begin(), unique_node_ids.begin())),
    unique_node_ids.size(),
    thrust::make_zip_iterator(
      parent_col_ids.begin(), col_range_begin.begin(), col_range_end.begin()));

  // convert parent_node_ids to parent_col_ids
  thrust::transform(
    rmm::exec_policy(stream),
    parent_col_ids.begin(),
    parent_col_ids.end(),
    parent_col_ids.begin(),
    [col_ids = original_col_ids.begin()] __device__(auto parent_node_id) -> size_type {
      return parent_node_id == parent_node_sentinel ? parent_node_sentinel
                                                    : col_ids[parent_node_id];
    });

  // the condition is true if the parent exists (is not the sentinel/root) and is not a list;
  // special case: in array-of-arrays mode, the row array's list column also counts as non-list
  auto is_non_list_parent = [column_categories = column_categories.begin(),
                             is_array_of_arrays,
                             row_array_parent_col_id] __device__(auto parent_col_id) -> bool {
    return !(parent_col_id == parent_node_sentinel ||
             column_categories[parent_col_id] == NC_LIST &&
               (!is_array_of_arrays || parent_col_id != row_array_parent_col_id));
  };
  // Mixed types in List children go to different columns,
  // so all immediate children of a list column should have the same max_row_offsets.
  //   create list's children max_row_offsets array. (initialize to zero)
  //   atomicMax on children max_row_offsets array.
  //   gather the max_row_offsets from children row offset array.
  {
    rmm::device_uvector<NodeIndexT> list_parents_children_max_row_offsets(num_columns, stream);
    thrust::fill(rmm::exec_policy(stream),
                 list_parents_children_max_row_offsets.begin(),
                 list_parents_children_max_row_offsets.end(),
                 0);
    thrust::for_each(rmm::exec_policy(stream),
                     unique_col_ids.begin(),
                     unique_col_ids.end(),
                     [column_categories = column_categories.begin(),
                      parent_col_ids    = parent_col_ids.begin(),
                      max_row_offsets   = max_row_offsets.begin(),
                      list_parents_children_max_row_offsets =
                        list_parents_children_max_row_offsets.begin()] __device__(auto col_id) {
                       auto parent_col_id = parent_col_ids[col_id];
                       if (parent_col_id != parent_node_sentinel and
                           column_categories[parent_col_id] == node_t::NC_LIST) {
                         cuda::atomic_ref<NodeIndexT, cuda::thread_scope_device> ref{
                           *(list_parents_children_max_row_offsets + parent_col_id)};
                         ref.fetch_max(max_row_offsets[col_id], cuda::std::memory_order_relaxed);
                       }
                     });
    thrust::gather_if(
      rmm::exec_policy(stream),
      parent_col_ids.begin(),
      parent_col_ids.end(),
      parent_col_ids.begin(),
      list_parents_children_max_row_offsets.begin(),
      max_row_offsets.begin(),
      [column_categories = column_categories.begin()] __device__(size_type parent_col_id) {
        return parent_col_id != parent_node_sentinel and
               column_categories[parent_col_id] == node_t::NC_LIST;
      });
  }

  // copy lists' max_row_offsets to children.
  // all structs should have same size.
  thrust::transform_if(
    rmm::exec_policy(stream),
    unique_col_ids.begin(),
    unique_col_ids.end(),
    max_row_offsets.begin(),
    [column_categories = column_categories.begin(),
     is_non_list_parent,
     parent_col_ids  = parent_col_ids.begin(),
     max_row_offsets = max_row_offsets.begin()] __device__(size_type col_id) {
      auto parent_col_id = parent_col_ids[col_id];
      // walk up past all ancestors that are not lists (nor the sentinel/root)
      while (is_non_list_parent(parent_col_id)) {
        col_id        = parent_col_id;
        parent_col_id = parent_col_ids[parent_col_id];
      }
      return max_row_offsets[col_id];
    },
    [column_categories = column_categories.begin(),
     is_non_list_parent,
     parent_col_ids = parent_col_ids.begin()] __device__(size_type col_id) {
      auto parent_col_id = parent_col_ids[col_id];
      // true if the parent exists and is not a list (see is_non_list_parent above)
      return is_non_list_parent(parent_col_id);
    });

  return std::tuple{tree_meta_t{std::move(column_categories),
                                std::move(parent_col_ids),
                                std::move(column_levels),
                                std::move(col_range_begin),
                                std::move(col_range_end)},
                    std::move(unique_col_ids),
                    std::move(max_row_offsets)};
}

/**
 * @brief Get the column indices for the values column for array of arrays rows
 *
 * @param row_array_children_level The level of the row array's children
 * @param d_tree The tree metadata
 * @param col_ids The column ids
 * @param num_columns The number of columns
 * @param stream The stream to use
 * @return The value columns' indices
 */
rmm::device_uvector<NodeIndexT> get_values_column_indices(TreeDepthT const row_array_children_level,
                                                          tree_meta_t const& d_tree,
                                                          device_span<NodeIndexT> col_ids,
                                                          size_type const num_columns,
                                                          rmm::cuda_stream_view stream)
{
  CUDF_FUNC_RANGE();
  auto [level2_nodes, level2_indices] = get_array_children_indices(
    row_array_children_level, d_tree.node_levels, d_tree.parent_node_ids, stream);
  auto col_id_location = thrust::make_permutation_iterator(col_ids.begin(), level2_nodes.begin());
  rmm::device_uvector<NodeIndexT> values_column_indices(num_columns, stream);
  thrust::scatter(rmm::exec_policy(stream),
                  level2_indices.begin(),
                  level2_indices.end(),
                  col_id_location,
                  values_column_indices.begin());
  return values_column_indices;
}

/**
 * @brief Copies strings specified by pairs of begin and end offsets to a host vector of strings.
 *
 * @param input String device buffer
 * @param node_range_begin Begin offset of the strings
 * @param node_range_end End offset of the strings
 * @param stream CUDA stream
 * @return Vector of strings
 */
std::vector<std::string> copy_strings_to_host(device_span<SymbolT const> input,
                                              device_span<SymbolOffsetT const> node_range_begin,
                                              device_span<SymbolOffsetT const> node_range_end,
                                              rmm::cuda_stream_view stream)
{
  CUDF_FUNC_RANGE();
  auto const num_strings = node_range_begin.size();
  rmm::device_uvector<size_type> string_offsets(num_strings, stream);
  rmm::device_uvector<size_type> string_lengths(num_strings, stream);
  auto d_offset_pairs = thrust::make_zip_iterator(node_range_begin.begin(), node_range_end.begin());
  thrust::transform(rmm::exec_policy(stream),
                    d_offset_pairs,
                    d_offset_pairs + num_strings,
                    thrust::make_zip_iterator(string_offsets.begin(), string_lengths.begin()),
                    [] __device__(auto const& offsets) {
                      // Note: first character for non-field columns
                      return thrust::make_tuple(
                        static_cast<size_type>(thrust::get<0>(offsets)),
                        static_cast<size_type>(thrust::get<1>(offsets) - thrust::get<0>(offsets)));
                    });

  cudf::io::parse_options_view options_view{};
  options_view.quotechar  = '\0';  // no quotes
  options_view.keepquotes = true;
  auto d_offset_length_it =
    thrust::make_zip_iterator(string_offsets.begin(), string_lengths.begin());
  auto d_column_names = parse_data(input.data(),
                                   d_offset_length_it,
                                   num_strings,
                                   data_type{type_id::STRING},
                                   rmm::device_buffer{},
                                   0,
                                   options_view,
                                   stream,
                                   rmm::mr::get_current_device_resource());
  auto to_host        = [stream](auto const& col) {
    if (col.is_empty()) return std::vector<std::string>{};
    auto const scv     = cudf::strings_column_view(col);
    auto const h_chars = cudf::detail::make_std_vector_sync<char>(
      cudf::device_span<char const>(scv.chars().data<char>(), scv.chars().size()), stream);
    auto const h_offsets = cudf::detail::make_std_vector_sync(
      cudf::device_span<cudf::size_type const>(
        scv.offsets().data<cudf::size_type>() + scv.offset(), scv.size() + 1),
      stream);
    // build std::string vector from chars and offsets
    std::vector<std::string> host_data;
    host_data.reserve(col.size());
    std::transform(std::begin(h_offsets),
                   std::end(h_offsets) - 1,
                   std::begin(h_offsets) + 1,
                   std::back_inserter(host_data),
                   [&](auto start, auto end) {
                     return std::string(h_chars.data() + start, end - start);
                   });
    return host_data;
  };
  return to_host(d_column_names->view());
}

/**
 * @brief Holds member data pointers of `d_json_column`
 *
 */
struct json_column_data {
  using row_offset_t = json_column::row_offset_t;
  row_offset_t* string_offsets;
  row_offset_t* string_lengths;
  row_offset_t* child_offsets;
  bitmask_type* validity;
};

/**
 * @brief Constructs `d_json_column` from the node tree representation.
 * Newly constructed columns are inserted into `root`'s children.
 * `root` must be a list type.
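 *
 * The construction mirrors the numbered comments in the body below: (1) gather per-column
 * information by reducing the node tree to a column tree, (2) generate the nested column
 * hierarchy and allocate its device memory, (3) scatter string offsets/lengths and validity
 * bits, (4) scatter list offsets, and (5) scan the offsets to finalize them.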
 *
 * @param input Input JSON string device data
 * @param tree Node tree representation of the JSON string
 * @param col_ids Column ids of the nodes in the tree
 * @param row_offsets Row offsets of the nodes in the tree
 * @param root Root node of the `d_json_column` tree
 * @param is_array_of_arrays Whether the tree is an array of arrays
 * @param is_enabled_lines Whether the input is a line-delimited JSON
 * @param stream CUDA stream used for device memory operations and kernel launches
 * @param mr Device memory resource used to allocate the device memory
 * of the child_offsets and validity members of `d_json_column`
 */
void make_device_json_column(device_span<SymbolT const> input,
                             tree_meta_t& tree,
                             device_span<NodeIndexT> col_ids,
                             device_span<size_type> row_offsets,
                             device_json_column& root,
                             bool is_array_of_arrays,
                             bool is_enabled_lines,
                             rmm::cuda_stream_view stream,
                             rmm::mr::device_memory_resource* mr)
{
  CUDF_FUNC_RANGE();
  auto num_nodes = col_ids.size();
  rmm::device_uvector<NodeIndexT> sorted_col_ids(col_ids.size(), stream);  // make a copy
  thrust::copy(rmm::exec_policy(stream), col_ids.begin(), col_ids.end(), sorted_col_ids.begin());

  // sort by {col_id} on {node_ids} stable
  rmm::device_uvector<NodeIndexT> node_ids(col_ids.size(), stream);
  thrust::sequence(rmm::exec_policy(stream), node_ids.begin(), node_ids.end());
  thrust::stable_sort_by_key(
    rmm::exec_policy(stream), sorted_col_ids.begin(), sorted_col_ids.end(), node_ids.begin());

  NodeIndexT const row_array_parent_col_id = [&]() {
    if (!is_array_of_arrays) return parent_node_sentinel;
    auto const list_node_index = is_enabled_lines ? 0 : 1;
    NodeIndexT value;
    CUDF_CUDA_TRY(cudaMemcpyAsync(&value,
                                  col_ids.data() + list_node_index,
                                  sizeof(NodeIndexT),
                                  cudaMemcpyDefault,
                                  stream.value()));
    stream.synchronize();
    return value;
  }();

  // 1. gather column information.
  auto [d_column_tree, d_unique_col_ids, d_max_row_offsets] =
    reduce_to_column_tree(tree,
                          col_ids,
                          sorted_col_ids,
                          node_ids,
                          row_offsets,
                          is_array_of_arrays,
                          row_array_parent_col_id,
                          stream);
  auto num_columns    = d_unique_col_ids.size();
  auto unique_col_ids = cudf::detail::make_std_vector_async(d_unique_col_ids, stream);
  auto column_categories =
    cudf::detail::make_std_vector_async(d_column_tree.node_categories, stream);
  auto column_parent_ids =
    cudf::detail::make_std_vector_async(d_column_tree.parent_node_ids, stream);
  auto column_range_beg =
    cudf::detail::make_std_vector_async(d_column_tree.node_range_begin, stream);
  auto max_row_offsets = cudf::detail::make_std_vector_async(d_max_row_offsets, stream);
  std::vector<std::string> column_names = copy_strings_to_host(
    input, d_column_tree.node_range_begin, d_column_tree.node_range_end, stream);

  // array of arrays column names
  if (is_array_of_arrays) {
    TreeDepthT const row_array_children_level = is_enabled_lines ? 1 : 2;
    auto values_column_indices =
      get_values_column_indices(row_array_children_level, tree, col_ids, num_columns, stream);
    auto h_values_column_indices =
      cudf::detail::make_std_vector_async(values_column_indices, stream);
    std::transform(unique_col_ids.begin(),
                   unique_col_ids.end(),
                   column_names.begin(),
                   column_names.begin(),
                   [&h_values_column_indices, &column_parent_ids, row_array_parent_col_id](
                     auto col_id, auto name) mutable {
                     return column_parent_ids[col_id] == row_array_parent_col_id ?
std::to_string(h_values_column_indices[col_id]) : name; }); } auto to_json_col_type = [](auto category) { switch (category) { case NC_STRUCT: return json_col_t::StructColumn; case NC_LIST: return json_col_t::ListColumn; case NC_STR: [[fallthrough]]; case NC_VAL: return json_col_t::StringColumn; default: return json_col_t::Unknown; } }; auto init_to_zero = [stream](auto& v) { thrust::uninitialized_fill(rmm::exec_policy(stream), v.begin(), v.end(), 0); }; auto initialize_json_columns = [&](auto i, auto& col) { if (column_categories[i] == NC_ERR || column_categories[i] == NC_FN) { return; } else if (column_categories[i] == NC_VAL || column_categories[i] == NC_STR) { col.string_offsets.resize(max_row_offsets[i] + 1, stream); col.string_lengths.resize(max_row_offsets[i] + 1, stream); init_to_zero(col.string_offsets); init_to_zero(col.string_lengths); } else if (column_categories[i] == NC_LIST) { col.child_offsets.resize(max_row_offsets[i] + 2, stream); init_to_zero(col.child_offsets); } col.num_rows = max_row_offsets[i] + 1; col.validity = cudf::detail::create_null_mask(col.num_rows, cudf::mask_state::ALL_NULL, stream, mr); col.type = to_json_col_type(column_categories[i]); }; // 2. generate nested columns tree and its device_memory // reorder unique_col_ids w.r.t. column_range_begin for order of column to be in field order. auto h_range_col_id_it = thrust::make_zip_iterator(column_range_beg.begin(), unique_col_ids.begin()); std::sort(h_range_col_id_it, h_range_col_id_it + num_columns, [](auto const& a, auto const& b) { return thrust::get<0>(a) < thrust::get<0>(b); }); // use hash map because we may skip field name's col_ids std::unordered_map<NodeIndexT, std::reference_wrapper<device_json_column>> columns; // map{parent_col_id, child_col_name}> = child_col_id, used for null value column tracking std::map<std::pair<NodeIndexT, std::string>, NodeIndexT> mapped_columns; // find column_ids which are values, but should be ignored in validity std::vector<uint8_t> ignore_vals(num_columns, 0); columns.try_emplace(parent_node_sentinel, std::ref(root)); for (auto const this_col_id : unique_col_ids) { if (column_categories[this_col_id] == NC_ERR || column_categories[this_col_id] == NC_FN) { continue; } // Struct, List, String, Value std::string name = ""; auto parent_col_id = column_parent_ids[this_col_id]; if (parent_col_id == parent_node_sentinel || column_categories[parent_col_id] == NC_LIST) { if (is_array_of_arrays && parent_col_id == row_array_parent_col_id) { name = column_names[this_col_id]; } else { name = list_child_name; } } else if (column_categories[parent_col_id] == NC_FN) { auto field_name_col_id = parent_col_id; parent_col_id = column_parent_ids[parent_col_id]; name = column_names[field_name_col_id]; } else { CUDF_FAIL("Unexpected parent column category"); } // If the child is already found, // replace if this column is a nested column and the existing was a value column // ignore this column if this column is a value column and the existing was a nested column auto it = columns.find(parent_col_id); CUDF_EXPECTS(it != columns.end(), "Parent column not found"); auto& parent_col = it->second.get(); bool replaced = false; if (mapped_columns.count({parent_col_id, name}) > 0) { if (column_categories[this_col_id] == NC_VAL || column_categories[this_col_id] == NC_STR) { ignore_vals[this_col_id] = 1; continue; } auto old_col_id = mapped_columns[{parent_col_id, name}]; if (column_categories[old_col_id] == NC_VAL || column_categories[old_col_id] == NC_STR) { // remap ignore_vals[old_col_id] = 1; 
mapped_columns.erase({parent_col_id, name}); columns.erase(old_col_id); parent_col.child_columns.erase(name); replaced = true; // to skip duplicate name in column_order } else { // If this is a nested column but we're trying to insert either (a) a list node into a // struct column or (b) a struct node into a list column, we fail CUDF_EXPECTS(not((column_categories[old_col_id] == NC_LIST and column_categories[this_col_id] == NC_STRUCT) or (column_categories[old_col_id] == NC_STRUCT and column_categories[this_col_id] == NC_LIST)), "A mix of lists and structs within the same column is not supported"); } } CUDF_EXPECTS(parent_col.child_columns.count(name) == 0, "duplicate column name: " + name); // move into parent device_json_column col(stream, mr); initialize_json_columns(this_col_id, col); auto inserted = parent_col.child_columns.try_emplace(name, std::move(col)).second; CUDF_EXPECTS(inserted, "child column insertion failed, duplicate column name in the parent"); if (not replaced) parent_col.column_order.push_back(name); columns.try_emplace(this_col_id, std::ref(parent_col.child_columns.at(name))); mapped_columns.try_emplace(std::make_pair(parent_col_id, name), this_col_id); } // restore unique_col_ids order std::sort(h_range_col_id_it, h_range_col_id_it + num_columns, [](auto const& a, auto const& b) { return thrust::get<1>(a) < thrust::get<1>(b); }); // move columns data to device. std::vector<json_column_data> columns_data(num_columns); for (auto& [col_id, col_ref] : columns) { if (col_id == parent_node_sentinel) continue; auto& col = col_ref.get(); columns_data[col_id] = json_column_data{col.string_offsets.data(), col.string_lengths.data(), col.child_offsets.data(), static_cast<bitmask_type*>(col.validity.data())}; } auto d_ignore_vals = cudf::detail::make_device_uvector_async( ignore_vals, stream, rmm::mr::get_current_device_resource()); auto d_columns_data = cudf::detail::make_device_uvector_async( columns_data, stream, rmm::mr::get_current_device_resource()); // 3. scatter string offsets to respective columns, set validity bits thrust::for_each_n( rmm::exec_policy(stream), thrust::counting_iterator<size_type>(0), num_nodes, [node_categories = tree.node_categories.begin(), col_ids = col_ids.begin(), row_offsets = row_offsets.begin(), range_begin = tree.node_range_begin.begin(), range_end = tree.node_range_end.begin(), d_ignore_vals = d_ignore_vals.begin(), d_columns_data = d_columns_data.begin()] __device__(size_type i) { switch (node_categories[i]) { case NC_STRUCT: set_bit(d_columns_data[col_ids[i]].validity, row_offsets[i]); break; case NC_LIST: set_bit(d_columns_data[col_ids[i]].validity, row_offsets[i]); break; case NC_STR: [[fallthrough]]; case NC_VAL: if (d_ignore_vals[col_ids[i]]) break; set_bit(d_columns_data[col_ids[i]].validity, row_offsets[i]); d_columns_data[col_ids[i]].string_offsets[row_offsets[i]] = range_begin[i]; d_columns_data[col_ids[i]].string_lengths[row_offsets[i]] = range_end[i] - range_begin[i]; break; default: break; } }); // 4. scatter List offset // copy_if only node's whose parent is list, (node_id, parent_col_id) // stable_sort by parent_col_id of {node_id}. // For all unique parent_node_id of (i==0, i-1!=i), write start offset. // (i==last, i+1!=i), write end offset. 
// unique_copy_by_key {parent_node_id} {row_offset} to // col[parent_col_id].child_offsets[row_offset[parent_node_id]] auto& parent_col_ids = sorted_col_ids; // reuse sorted_col_ids auto parent_col_id = thrust::make_transform_iterator( thrust::make_counting_iterator<size_type>(0), [col_ids = col_ids.begin(), parent_node_ids = tree.parent_node_ids.begin()] __device__(size_type node_id) { return parent_node_ids[node_id] == parent_node_sentinel ? parent_node_sentinel : col_ids[parent_node_ids[node_id]]; }); auto const list_children_end = thrust::copy_if( rmm::exec_policy(stream), thrust::make_zip_iterator(thrust::make_counting_iterator<size_type>(0), parent_col_id), thrust::make_zip_iterator(thrust::make_counting_iterator<size_type>(0), parent_col_id) + num_nodes, thrust::make_counting_iterator<size_type>(0), thrust::make_zip_iterator(node_ids.begin(), parent_col_ids.begin()), [node_categories = tree.node_categories.begin(), parent_node_ids = tree.parent_node_ids.begin()] __device__(size_type node_id) { auto parent_node_id = parent_node_ids[node_id]; return parent_node_id != parent_node_sentinel and node_categories[parent_node_id] == NC_LIST; }); auto const num_list_children = list_children_end - thrust::make_zip_iterator(node_ids.begin(), parent_col_ids.begin()); thrust::stable_sort_by_key(rmm::exec_policy(stream), parent_col_ids.begin(), parent_col_ids.begin() + num_list_children, node_ids.begin()); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), num_list_children, [node_ids = node_ids.begin(), parent_node_ids = tree.parent_node_ids.begin(), parent_col_ids = parent_col_ids.begin(), row_offsets = row_offsets.begin(), d_columns_data = d_columns_data.begin(), num_list_children] __device__(size_type i) { auto const node_id = node_ids[i]; auto const parent_node_id = parent_node_ids[node_id]; // scatter to list_offset if (i == 0 or parent_node_ids[node_ids[i - 1]] != parent_node_id) { d_columns_data[parent_col_ids[i]].child_offsets[row_offsets[parent_node_id]] = row_offsets[node_id]; } // last value of list child_offset is its size. if (i == num_list_children - 1 or parent_node_ids[node_ids[i + 1]] != parent_node_id) { d_columns_data[parent_col_ids[i]].child_offsets[row_offsets[parent_node_id] + 1] = row_offsets[node_id] + 1; } }); // 5. scan on offsets. 
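// A worked example of the max-scan in step 5 below (values are illustrative):
//   string_offsets before the scan: [0, 12, 0, 27, 0]
//   after inclusive_scan with thrust::maximum: [0, 12, 12, 27, 27]
// Rows that received no token keep their zero-initialized entries, so null rows
// inherit the preceding offset (their scattered lengths stay 0), which keeps the
// offsets monotonically non-decreasing.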
for (auto& [id, col_ref] : columns) { auto& col = col_ref.get(); if (col.type == json_col_t::StringColumn) { thrust::inclusive_scan(rmm::exec_policy(stream), col.string_offsets.begin(), col.string_offsets.end(), col.string_offsets.begin(), thrust::maximum<json_column::row_offset_t>{}); } else if (col.type == json_col_t::ListColumn) { thrust::inclusive_scan(rmm::exec_policy(stream), col.child_offsets.begin(), col.child_offsets.end(), col.child_offsets.begin(), thrust::maximum<json_column::row_offset_t>{}); } } } /** * @brief Retrieves the parse_options to be used for type inference and type casting * * @param options The reader options to influence the relevant type inference and type casting * options */ cudf::io::parse_options parsing_options(cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream); std::pair<std::unique_ptr<column>, std::vector<column_name_info>> device_json_column_to_cudf_column( device_json_column& json_col, device_span<SymbolT const> d_input, cudf::io::parse_options const& options, std::optional<schema_element> schema, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto validity_size_check = [](device_json_column& json_col) { CUDF_EXPECTS(json_col.validity.size() >= bitmask_allocation_size_bytes(json_col.num_rows), "valid_count is too small"); }; auto make_validity = [stream, validity_size_check]( device_json_column& json_col) -> std::pair<rmm::device_buffer, size_type> { validity_size_check(json_col); auto null_count = cudf::detail::null_count( static_cast<bitmask_type*>(json_col.validity.data()), 0, json_col.num_rows, stream); // full null_mask is always required for parse_data return {std::move(json_col.validity), null_count}; // Note: json_col modified here, moves this memory }; auto get_child_schema = [schema](auto child_name) -> std::optional<schema_element> { if (schema.has_value()) { auto const result = schema.value().child_types.find(child_name); if (result != std::end(schema.value().child_types)) { return result->second; } } return {}; }; switch (json_col.type) { case json_col_t::StringColumn: { // move string_offsets to GPU and transform to string column auto const col_size = json_col.string_offsets.size(); using char_length_pair_t = thrust::pair<char const*, size_type>; CUDF_EXPECTS(json_col.string_offsets.size() == json_col.string_lengths.size(), "string offset, string length mismatch"); rmm::device_uvector<char_length_pair_t> d_string_data(col_size, stream); // TODO how about directly storing pair<char*, size_t> in json_column? auto offset_length_it = thrust::make_zip_iterator(json_col.string_offsets.begin(), json_col.string_lengths.begin()); data_type target_type{}; if (schema.has_value()) { #ifdef NJP_DEBUG_PRINT std::cout << "-> explicit type: " << (schema.has_value() ? 
std::to_string(static_cast<int>(schema->type.id())) : "n/a"); #endif target_type = schema.value().type; } // Infer column type, if we don't have an explicit type for it else { target_type = cudf::io::detail::infer_data_type( options.json_view(), d_input, offset_length_it, col_size, stream); } auto [result_bitmask, null_count] = make_validity(json_col); // Convert strings to the inferred data type auto col = parse_data(d_input.data(), offset_length_it, col_size, target_type, std::move(result_bitmask), null_count, options.view(), stream, mr); // Reset nullable if we do not have nulls // This is to match the existing JSON reader's behaviour: // - Non-string columns will always be returned as nullable // - String columns will be returned as nullable, iff there's at least one null entry if (target_type.id() == type_id::STRING and col->null_count() == 0) { col->set_null_mask(rmm::device_buffer{0, stream, mr}, 0); } // For string columns return ["offsets", "char"] schema if (target_type.id() == type_id::STRING) { return {std::move(col), std::vector<column_name_info>{{"offsets"}, {"chars"}}}; } // Non-string leaf-columns (e.g., numeric) do not have child columns in the schema return {std::move(col), std::vector<column_name_info>{}}; } case json_col_t::StructColumn: { std::vector<std::unique_ptr<column>> child_columns; std::vector<column_name_info> column_names{}; size_type num_rows{json_col.num_rows}; // Create children columns for (auto const& col_name : json_col.column_order) { auto const& col = json_col.child_columns.find(col_name); column_names.emplace_back(col->first); auto& child_col = col->second; auto [child_column, names] = device_json_column_to_cudf_column( child_col, d_input, options, get_child_schema(col_name), stream, mr); CUDF_EXPECTS(num_rows == child_column->size(), "All children columns must have the same size"); child_columns.push_back(std::move(child_column)); column_names.back().children = names; } auto [result_bitmask, null_count] = make_validity(json_col); // The null_mask is set after creation of struct column is to skip the superimpose_nulls and // null validation applied in make_structs_column factory, which is not needed for json auto ret_col = make_structs_column(num_rows, std::move(child_columns), 0, {}, stream, mr); ret_col->set_null_mask(std::move(result_bitmask), null_count); return {std::move(ret_col), column_names}; } case json_col_t::ListColumn: { size_type num_rows = json_col.child_offsets.size() - 1; std::vector<column_name_info> column_names{}; column_names.emplace_back("offsets"); column_names.emplace_back( json_col.child_columns.empty() ? list_child_name : json_col.child_columns.begin()->first); // Note: json_col modified here, reuse the memory auto offsets_column = std::make_unique<column>(data_type{type_id::INT32}, num_rows + 1, json_col.child_offsets.release(), rmm::device_buffer{}, 0); // Create children column auto [child_column, names] = json_col.child_columns.empty() ? std::pair<std::unique_ptr<column>, // EMPTY type could not used because gather throws exception on EMPTY type. 
std::vector<column_name_info>>{std::make_unique<column>( data_type{type_id::INT8}, 0, rmm::device_buffer{}, rmm::device_buffer{}, 0), std::vector<column_name_info>{}} : device_json_column_to_cudf_column( json_col.child_columns.begin()->second, d_input, options, get_child_schema(json_col.child_columns.begin()->first), stream, mr); column_names.back().children = names; auto [result_bitmask, null_count] = make_validity(json_col); auto ret_col = make_lists_column(num_rows, std::move(offsets_column), std::move(child_column), 0, rmm::device_buffer{0, stream, mr}, stream, mr); // The null_mask is set after creation of list column is to skip the purge_nonempty_nulls and // null validation applied in make_lists_column factory, which is not needed for json // parent column cannot be null when its children is non-empty in JSON ret_col->set_null_mask(std::move(result_bitmask), null_count); return {std::move(ret_col), std::move(column_names)}; } default: CUDF_FAIL("Unsupported column type"); break; } } table_with_metadata device_parse_nested_json(device_span<SymbolT const> d_input, cudf::io::json_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); auto gpu_tree = [&]() { // Parse the JSON and get the token stream const auto [tokens_gpu, token_indices_gpu] = get_token_stream(d_input, options, stream, rmm::mr::get_current_device_resource()); // gpu tree generation return get_tree_representation( tokens_gpu, token_indices_gpu, stream, rmm::mr::get_current_device_resource()); }(); // IILE used to free memory of token data. #ifdef NJP_DEBUG_PRINT auto h_input = cudf::detail::make_host_vector_async(d_input, stream); print_tree(h_input, gpu_tree, stream); #endif bool const is_array_of_arrays = [&]() { std::array<node_t, 2> h_node_categories = {NC_ERR, NC_ERR}; auto const size_to_copy = std::min(size_t{2}, gpu_tree.node_categories.size()); CUDF_CUDA_TRY(cudaMemcpyAsync(h_node_categories.data(), gpu_tree.node_categories.data(), sizeof(node_t) * size_to_copy, cudaMemcpyDefault, stream.value())); stream.synchronize(); if (options.is_enabled_lines()) return h_node_categories[0] == NC_LIST; return h_node_categories[0] == NC_LIST and h_node_categories[1] == NC_LIST; }(); auto [gpu_col_id, gpu_row_offsets] = records_orient_tree_traversal(d_input, gpu_tree, is_array_of_arrays, options.is_enabled_lines(), stream, rmm::mr::get_current_device_resource()); device_json_column root_column(stream, mr); root_column.type = json_col_t::ListColumn; root_column.child_offsets.resize(2, stream); thrust::fill(rmm::exec_policy(stream), root_column.child_offsets.begin(), root_column.child_offsets.end(), 0); // Get internal JSON column make_device_json_column(d_input, gpu_tree, gpu_col_id, gpu_row_offsets, root_column, is_array_of_arrays, options.is_enabled_lines(), stream, mr); // data_root refers to the root column of the data represented by the given JSON string auto& data_root = options.is_enabled_lines() ? 
root_column : root_column.child_columns.begin()->second; // Zero row entries if (data_root.type == json_col_t::ListColumn && data_root.child_columns.empty()) { return table_with_metadata{std::make_unique<table>(std::vector<std::unique_ptr<column>>{})}; } // Verify that we were in fact given a list of structs (or in JSON speech: an array of objects) auto constexpr single_child_col_count = 1; CUDF_EXPECTS(data_root.type == json_col_t::ListColumn and data_root.child_columns.size() == single_child_col_count and data_root.child_columns.begin()->second.type == (is_array_of_arrays ? json_col_t::ListColumn : json_col_t::StructColumn), "Input needs to be an array of arrays or an array of (nested) objects"); // Slice off the root list column, which has only a single row that contains all the structs auto& root_struct_col = data_root.child_columns.begin()->second; // Initialize meta data to be populated while recursing through the tree of columns std::vector<std::unique_ptr<column>> out_columns; std::vector<column_name_info> out_column_names; auto parse_opt = parsing_options(options, stream); // Iterate over the struct's child columns and convert to cudf column size_type column_index = 0; for (auto const& col_name : root_struct_col.column_order) { auto& json_col = root_struct_col.child_columns.find(col_name)->second; // Insert this columns name into the schema out_column_names.emplace_back(col_name); std::optional<schema_element> child_schema_element = std::visit( cudf::detail::visitor_overload{ [column_index](std::vector<data_type> const& user_dtypes) -> std::optional<schema_element> { return (static_cast<std::size_t>(column_index) < user_dtypes.size()) ? std::optional<schema_element>{{user_dtypes[column_index]}} : std::optional<schema_element>{}; }, [col_name]( std::map<std::string, data_type> const& user_dtypes) -> std::optional<schema_element> { return (user_dtypes.find(col_name) != std::end(user_dtypes)) ? std::optional<schema_element>{{user_dtypes.find(col_name)->second}} : std::optional<schema_element>{}; }, [col_name](std::map<std::string, schema_element> const& user_dtypes) -> std::optional<schema_element> { return (user_dtypes.find(col_name) != std::end(user_dtypes)) ? user_dtypes.find(col_name)->second : std::optional<schema_element>{}; }}, options.get_dtypes()); #ifdef NJP_DEBUG_PRINT auto debug_schema_print = [](auto ret) { std::cout << ", type id: " << (ret.has_value() ? std::to_string(static_cast<int>(ret->type.id())) : "n/a") << ", with " << (ret.has_value() ? 
ret->child_types.size() : 0) << " children" << "\n"; }; std::visit( cudf::detail::visitor_overload{[column_index](std::vector<data_type> const&) { std::cout << "Column by index: #" << column_index; }, [col_name](std::map<std::string, data_type> const&) { std::cout << "Column by flat name: '" << col_name; }, [col_name](std::map<std::string, schema_element> const&) { std::cout << "Column by nested name: #" << col_name; }}, options.get_dtypes()); debug_schema_print(child_schema_element); #endif // Get this JSON column's cudf column and schema info, (modifies json_col) auto [cudf_col, col_name_info] = device_json_column_to_cudf_column( json_col, d_input, parse_opt, child_schema_element, stream, mr); // TODO: RangeIndex as DataFrame.columns names for array of arrays // if (is_array_of_arrays) { // col_name_info.back().name = ""; // } out_column_names.back().children = std::move(col_name_info); out_columns.emplace_back(std::move(cudf_col)); column_index++; } return table_with_metadata{std::make_unique<table>(std::move(out_columns)), {out_column_names}}; } } // namespace cudf::io::json::detail
0
rapidsai_public_repos/cudf/cpp/src/io/json
rapidsai_public_repos/cudf/cpp/src/io/json/legacy/json_gpu.hpp
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <io/utilities/column_type_histogram.hpp> #include <io/utilities/parsing_utils.cuh> #include <hash/concurrent_unordered_map.cuh> #include <cudf/table/table_device_view.cuh> #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <thrust/optional.h> using cudf::device_span; namespace cudf::io::json::detail::legacy { using col_map_type = concurrent_unordered_map<uint32_t, cudf::size_type>; /** * @brief Convert a buffer of input data (text) into raw cuDF column data. * * @param[in] options A set of parsing options * @param[in] data The entire data to read * @param[in] row_offsets The start of each data record * @param[in] dtypes The data type of each column * @param[in] col_map Pointer to the (column name hash -> column index) map in device memory. * nullptr is passed when the input file does not consist of objects. * @param[out] output_columns The output column data * @param[out] valid_fields The bitmaps indicating whether column fields are valid * @param[out] num_valid_fields The numbers of valid fields in columns * @param[in] stream CUDA stream used for device memory operations and kernel launches. */ void convert_json_to_columns(parse_options_view const& options, device_span<char const> data, device_span<uint64_t const> row_offsets, device_span<data_type const> column_types, col_map_type* col_map, device_span<void* const> output_columns, device_span<bitmask_type* const> valid_fields, device_span<cudf::size_type> num_valid_fields, rmm::cuda_stream_view stream); /** * @brief Process a buffer of data and determine information about the column types within. * * @param[in] options A set of parsing options * @param[in] data Input data buffer * @param[in] row_offsets The offset of each row in the input * @param[in] num_columns The number of columns of input data * @param[in] col_map Pointer to the (column name hash -> column index) map in device memory. * nullptr is passed when the input file does not consist of objects. * @param[in] stream CUDA stream used for device memory operations and kernel launches. * * @returns The count for each column data type */ std::vector<cudf::io::column_type_histogram> detect_data_types( parse_options_view const& options, device_span<char const> data, device_span<uint64_t const> row_offsets, bool do_set_null_count, int num_columns, col_map_type* col_map, rmm::cuda_stream_view stream); /** * @brief Collects information about JSON object keys in the file. * * @param[in] options A set of parsing options * @param[in] data Input data buffer * @param[in] row_offsets The offset of each row in the input * @param[out] keys_cnt Number of keys found in the file * @param[out] keys_info optional, information (offset, length, hash) for each found key * @param[in] stream CUDA stream used for device memory operations and kernel launches. 
*/ void collect_keys_info(parse_options_view const& options, device_span<char const> data, device_span<uint64_t const> row_offsets, unsigned long long int* keys_cnt, thrust::optional<mutable_table_device_view> keys_info, rmm::cuda_stream_view stream); } // namespace cudf::io::json::detail::legacy
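To make the (column name hash -> column index) contract of col_map_type concrete, here is a host-side analogue of how a field is routed to a column. This is an illustrative sketch only: the real map is the device-side concurrent_unordered_map declared above, and the real hash is MurmurHash3_x86_32, for which std::hash stands in here.

#include <cstdint>
#include <string>
#include <unordered_map>

// Host-side stand-in for col_map_type: column-name hash -> column index.
std::unordered_map<uint32_t, int> col_map;

uint32_t hash_name(std::string const& s)
{
  // Placeholder hash for illustration; device code uses MurmurHash3_x86_32.
  return static_cast<uint32_t>(std::hash<std::string>{}(s));
}

int resolve_column(std::string const& key, int field_idx)
{
  auto const it = col_map.find(hash_name(key));
  // Unknown keys fall back to the positional field index, mirroring the kernels.
  return it != col_map.end() ? it->second : field_idx;
}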
0
rapidsai_public_repos/cudf/cpp/src/io/json
rapidsai_public_repos/cudf/cpp/src/io/json/legacy/read_json.hpp
/* * Copyright (c) 2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/io/datasource.hpp> #include <cudf/io/json.hpp> #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/mr/device/device_memory_resource.hpp> #include <memory> #include <vector> namespace cudf::io::json::detail::legacy { table_with_metadata read_json(host_span<std::unique_ptr<datasource>> sources, json_reader_options const& reader_opts, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); } // namespace cudf::io::json::detail::legacy
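This detail-level entry point is normally reached via the public cudf::io::read_json, but it can be driven directly. A hedged sketch under stated assumptions (the path is a placeholder; the legacy reader accepts only JSON Lines input, as enforced in reader_impl.cu below):

#include <cudf/io/datasource.hpp>
#include <cudf/io/json.hpp>
#include <cudf/utilities/default_stream.hpp>

#include <rmm/mr/device/per_device_resource.hpp>

#include <memory>
#include <vector>

cudf::io::table_with_metadata read_legacy_lines()
{
  std::vector<std::unique_ptr<cudf::io::datasource>> sources;
  sources.emplace_back(cudf::io::datasource::create("input.jsonl"));  // placeholder path

  auto opts = cudf::io::json_reader_options::builder(cudf::io::source_info{"input.jsonl"})
                .lines(true)  // required by this reader
                .build();
  return cudf::io::json::detail::legacy::read_json(
    sources, opts, cudf::get_default_stream(), rmm::mr::get_current_device_resource());
}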
0
rapidsai_public_repos/cudf/cpp/src/io/json
rapidsai_public_repos/cudf/cpp/src/io/json/legacy/reader_impl.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "json_gpu.hpp" #include <hash/concurrent_unordered_map.cuh> #include <io/comp/io_uncomp.hpp> #include <io/utilities/column_buffer.hpp> #include <io/utilities/parsing_utils.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/detail/utilities/visitor_overload.hpp> #include <cudf/groupby.hpp> #include <cudf/io/datasource.hpp> #include <cudf/io/detail/json.hpp> #include <cudf/io/json.hpp> #include <cudf/sorting.hpp> #include <cudf/strings/detail/replace.hpp> #include <cudf/table/table.hpp> #include <cudf/types.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_scalar.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/host_vector.h> #include <thrust/iterator/constant_iterator.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/optional.h> #include <thrust/pair.h> #include <thrust/sort.h> #include <thrust/transform.h> using cudf::host_span; namespace cudf::io::json::detail::legacy { using col_map_ptr_type = std::unique_ptr<col_map_type, std::function<void(col_map_type*)>>; /** * @brief Aggregate the table containing keys info by their hash values. * * @param[in] info Table with columns containing key offsets, lengths and hashes, respectively * * @return Table with data aggregated by key hash values */ std::unique_ptr<table> aggregate_keys_info(std::unique_ptr<table> info) { auto const info_view = info->view(); std::vector<groupby::aggregation_request> requests; requests.emplace_back(groupby::aggregation_request{info_view.column(0)}); requests.back().aggregations.emplace_back(make_min_aggregation<groupby_aggregation>()); requests.back().aggregations.emplace_back(make_nth_element_aggregation<groupby_aggregation>(0)); requests.emplace_back(groupby::aggregation_request{info_view.column(1)}); requests.back().aggregations.emplace_back(make_min_aggregation<groupby_aggregation>()); requests.back().aggregations.emplace_back(make_nth_element_aggregation<groupby_aggregation>(0)); // Aggregate by hash values groupby::groupby gb_obj( table_view({info_view.column(2)}), null_policy::EXCLUDE, sorted::NO, {}, {}); auto result = gb_obj.aggregate(requests); // TODO: no stream parameter? std::vector<std::unique_ptr<column>> out_columns; out_columns.emplace_back(std::move(result.second[0].results[0])); // offsets out_columns.emplace_back(std::move(result.second[1].results[0])); // lengths out_columns.emplace_back(std::move(result.first->release()[0])); // hashes return std::make_unique<table>(std::move(out_columns)); } /** * @brief Initializes the (key hash -> column index) hash map. 
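 *
 * Illustrative behaviour (hash values made up): for name hashes [0x1a, 0x2b, 0x3c],
 * the kernel below inserts {0x1a -> 0, 0x2b -> 1, 0x3c -> 2}; each key hash maps to
 * that key's position in the aggregated, offset-sorted key table.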
*/ col_map_ptr_type create_col_names_hash_map(column_view column_name_hashes, rmm::cuda_stream_view stream) { auto key_col_map = col_map_type::create(column_name_hashes.size(), stream); auto const column_data = column_name_hashes.data<uint32_t>(); thrust::for_each_n(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), column_name_hashes.size(), [map = *key_col_map, column_data] __device__(size_type idx) mutable { map.insert(thrust::make_pair(column_data[idx], idx)); }); return key_col_map; } /** * @brief Create a table whose columns contain the information on JSON objects' keys. * * The columns contain name offsets in the file, name lengths and name hashes, respectively. * * @param[in] options Parsing options (e.g. delimiter and quotation character) * @param[in] data Input JSON device data * @param[in] row_offsets Device array of row start locations in the input buffer * @param[in] stream CUDA stream used for device memory operations and kernel launches * * @return std::unique_ptr<table> cudf table with three columns (offsets, lengths, hashes) */ std::unique_ptr<table> create_json_keys_info_table(parse_options_view const& parse_opts, device_span<char const> const data, device_span<uint64_t const> const row_offsets, rmm::cuda_stream_view stream) { // Count keys rmm::device_scalar<unsigned long long int> key_counter(0, stream); collect_keys_info(parse_opts, data, row_offsets, key_counter.data(), {}, stream); // Allocate columns to store hash value, length, and offset of each JSON object key in the input auto const num_keys = key_counter.value(stream); std::vector<std::unique_ptr<column>> info_columns; info_columns.emplace_back( make_numeric_column(data_type(type_id::UINT64), num_keys, mask_state::UNALLOCATED, stream)); info_columns.emplace_back( make_numeric_column(data_type(type_id::UINT16), num_keys, mask_state::UNALLOCATED, stream)); info_columns.emplace_back( make_numeric_column(data_type(type_id::UINT32), num_keys, mask_state::UNALLOCATED, stream)); // Create a table out of these columns to pass them around more easily auto info_table = std::make_unique<table>(std::move(info_columns)); auto const info_table_mdv = mutable_table_device_view::create(info_table->mutable_view(), stream); // Reset the key counter - now used for indexing key_counter.set_value_to_zero_async(stream); // Fill the allocated columns collect_keys_info(parse_opts, data, row_offsets, key_counter.data(), {*info_table_mdv}, stream); return info_table; } /** * @brief Extract the keys from the JSON file the name offsets/lengths. 
*/ std::vector<std::string> create_key_strings(char const* h_data, table_view sorted_info, rmm::cuda_stream_view stream) { auto const num_cols = sorted_info.num_rows(); std::vector<uint64_t> h_offsets(num_cols); CUDF_CUDA_TRY(cudaMemcpyAsync(h_offsets.data(), sorted_info.column(0).data<uint64_t>(), sizeof(uint64_t) * num_cols, cudaMemcpyDefault, stream.value())); std::vector<uint16_t> h_lens(num_cols); CUDF_CUDA_TRY(cudaMemcpyAsync(h_lens.data(), sorted_info.column(1).data<uint16_t>(), sizeof(uint16_t) * num_cols, cudaMemcpyDefault, stream.value())); std::vector<std::string> names(num_cols); std::transform(h_offsets.cbegin(), h_offsets.cend(), h_lens.cbegin(), names.begin(), [&](auto offset, auto len) { return std::string(h_data + offset, len); }); return names; } auto sort_keys_info_by_offset(std::unique_ptr<table> info) { auto const agg_offset_col_view = info->get_column(0).view(); return sort_by_key(info->view(), table_view({agg_offset_col_view})); } /** * @brief Extract JSON object keys from a JSON file. * * @param[in] stream CUDA stream used for device memory operations and kernel launches. * * @return Names of JSON object keys in the file */ std::pair<std::vector<std::string>, col_map_ptr_type> get_json_object_keys_hashes( parse_options_view const& parse_opts, host_span<char const> h_data, device_span<uint64_t const> rec_starts, device_span<char const> d_data, rmm::cuda_stream_view stream) { auto info = create_json_keys_info_table(parse_opts, d_data, rec_starts, stream); auto aggregated_info = aggregate_keys_info(std::move(info)); auto sorted_info = sort_keys_info_by_offset(std::move(aggregated_info)); return {create_key_strings(h_data.data(), sorted_info->view(), stream), create_col_names_hash_map(sorted_info->get_column(2).view(), stream)}; } std::vector<uint8_t> ingest_raw_input(host_span<std::unique_ptr<datasource>> sources, compression_type compression, size_t range_offset, size_t range_size, size_t range_size_padded) { CUDF_FUNC_RANGE(); // Iterate through the user defined sources and read the contents into the local buffer size_t total_source_size = 0; for (auto const& source : sources) { total_source_size += source->size(); } total_source_size = total_source_size - (range_offset * sources.size()); auto buffer = std::vector<uint8_t>(total_source_size); size_t bytes_read = 0; for (auto const& source : sources) { if (!source->is_empty()) { auto data_size = (range_size_padded != 0) ? range_size_padded : source->size(); auto destination = buffer.data() + bytes_read; bytes_read += source->host_read(range_offset, data_size, destination); } } if (compression == compression_type::NONE) { return buffer; } else { return decompress(compression, buffer); } } bool should_load_whole_source(json_reader_options const& reader_opts) { return reader_opts.get_byte_range_offset() == 0 and // reader_opts.get_byte_range_size() == 0; } rmm::device_uvector<uint64_t> find_record_starts(json_reader_options const& reader_opts, host_span<char const> h_data, device_span<char const> d_data, rmm::cuda_stream_view stream) { std::vector<char> chars_to_count{'\n'}; // Currently, ignoring lineterminations within quotes is handled by recording the records of both, // and then filtering out the records that is a quotechar or a linetermination within a quotechar // pair. // If not starting at an offset, add an extra row to account for the first row in the file cudf::size_type prefilter_count = ((reader_opts.get_byte_range_offset() == 0) ? 
1 : 0); if (should_load_whole_source(reader_opts)) { prefilter_count += count_all_from_set(d_data, chars_to_count, stream); } else { prefilter_count += count_all_from_set(h_data, chars_to_count, stream); } rmm::device_uvector<uint64_t> rec_starts(prefilter_count, stream); auto* find_result_ptr = rec_starts.data(); // Manually adding an extra row to account for the first row in the file if (reader_opts.get_byte_range_offset() == 0) { find_result_ptr++; CUDF_CUDA_TRY(cudaMemsetAsync(rec_starts.data(), 0ull, sizeof(uint64_t), stream.value())); } std::vector<char> chars_to_find{'\n'}; // Passing offset = 1 to return positions AFTER the found character if (should_load_whole_source(reader_opts)) { find_all_from_set(d_data, chars_to_find, 1, find_result_ptr, stream); } else { find_all_from_set(h_data, chars_to_find, 1, find_result_ptr, stream); } // Previous call stores the record positions as encountered by all threads // Sort the record positions as subsequent processing may require filtering // certain rows or other processing on specific records thrust::sort(rmm::exec_policy(stream), rec_starts.begin(), rec_starts.end()); auto filtered_count = prefilter_count; // Exclude the ending newline as it does not precede a record start if (h_data.back() == '\n') { filtered_count--; } rec_starts.resize(filtered_count, stream); return rec_starts; } /** * @brief Uploads the relevant segment of the input json data onto the GPU. * * Sets the d_data_ data member. * Only rows that need to be parsed are copied, based on the byte range * Also updates the array of record starts to match the device data offset. */ rmm::device_uvector<char> upload_data_to_device(json_reader_options const& reader_opts, host_span<char const> h_data, rmm::device_uvector<uint64_t>& rec_starts, rmm::cuda_stream_view stream) { CUDF_FUNC_RANGE(); size_t end_offset = h_data.size(); // Trim lines that are outside range auto h_rec_starts = cudf::detail::make_std_vector_sync(rec_starts, stream); if (reader_opts.get_byte_range_size() != 0) { auto it = h_rec_starts.end() - 1; while (it >= h_rec_starts.begin() && *it > reader_opts.get_byte_range_size()) { end_offset = *it; --it; } h_rec_starts.erase(it + 1, h_rec_starts.end()); } // Resize to exclude rows outside of the range // Adjust row start positions to account for the data subcopy size_t start_offset = h_rec_starts.front(); rec_starts.resize(h_rec_starts.size(), stream); thrust::transform(rmm::exec_policy(stream), rec_starts.begin(), rec_starts.end(), thrust::make_constant_iterator(start_offset), rec_starts.begin(), thrust::minus<uint64_t>()); size_t const bytes_to_upload = end_offset - start_offset; CUDF_EXPECTS(bytes_to_upload <= h_data.size(), "Error finding the record within the specified byte range.\n"); // Upload the raw data that is within the rows of interest return cudf::detail::make_device_uvector_async( h_data.subspan(start_offset, bytes_to_upload), stream, rmm::mr::get_current_device_resource()); } std::pair<std::vector<std::string>, col_map_ptr_type> get_column_names_and_map( parse_options_view const& parse_opts, host_span<char const> h_data, device_span<uint64_t const> rec_starts, device_span<char const> d_data, rmm::cuda_stream_view stream) { // If file only contains one row, use the file size for the row size uint64_t first_row_len = d_data.size(); if (rec_starts.size() > 1) { // Set first_row_len to the offset of the second row, if it exists CUDF_CUDA_TRY(cudaMemcpyAsync( &first_row_len, rec_starts.data() + 1, sizeof(uint64_t), cudaMemcpyDefault, stream.value())); } 
std::vector<char> first_row(first_row_len); CUDF_CUDA_TRY(cudaMemcpyAsync(first_row.data(), d_data.data(), first_row_len * sizeof(char), cudaMemcpyDefault, stream.value())); stream.synchronize(); // Determine the row format between: // JSON array - [val1, val2, ...] and // JSON object - {"col1":val1, "col2":val2, ...} // based on the top level opening bracket auto const first_square_bracket = std::find(first_row.begin(), first_row.end(), '['); auto const first_curly_bracket = std::find(first_row.begin(), first_row.end(), '{'); CUDF_EXPECTS(first_curly_bracket != first_row.end() || first_square_bracket != first_row.end(), "Input data is not a valid JSON file."); // If the first opening bracket is '{', assume object format if (first_curly_bracket < first_square_bracket) { // use keys as column names if input rows are objects return get_json_object_keys_hashes(parse_opts, h_data, rec_starts, d_data, stream); } else { int cols_found = 0; bool quotation = false; auto column_names = std::vector<std::string>(); for (size_t pos = 0; pos < first_row.size(); ++pos) { // Flip the quotation flag if current character is a quotechar if (first_row[pos] == parse_opts.quotechar) { quotation = !quotation; } // Check if end of a column/row else if (pos == first_row.size() - 1 || (!quotation && first_row[pos] == parse_opts.delimiter)) { column_names.emplace_back(std::to_string(cols_found++)); } } return {column_names, col_map_type::create(0, stream)}; } } std::vector<data_type> get_data_types(json_reader_options const& reader_opts, parse_options_view const& parse_opts, std::vector<std::string> const& column_names, col_map_type* column_map, device_span<uint64_t const> rec_starts, device_span<char const> data, rmm::cuda_stream_view stream) { bool has_to_infer_column_types = std::visit([](auto const& dtypes) { return dtypes.empty(); }, reader_opts.get_dtypes()); if (!has_to_infer_column_types) { return std::visit( cudf::detail::visitor_overload{ [&](std::vector<data_type> const& dtypes) { CUDF_EXPECTS(dtypes.size() == column_names.size(), "Must specify types for all columns"); return dtypes; }, [&](std::map<std::string, data_type> const& dtypes) { std::vector<data_type> sorted_dtypes; std::transform(std::cbegin(column_names), std::cend(column_names), std::back_inserter(sorted_dtypes), [&](auto const& column_name) { auto const it = dtypes.find(column_name); CUDF_EXPECTS(it != dtypes.end(), "Must specify types for all columns"); return it->second; }); return sorted_dtypes; }, [&](std::map<std::string, schema_element> const& dtypes) { std::vector<data_type> sorted_dtypes; std::transform(std::cbegin(column_names), std::cend(column_names), std::back_inserter(sorted_dtypes), [&](auto const& column_name) { auto const it = dtypes.find(column_name); CUDF_EXPECTS(it != dtypes.end(), "Must specify types for all columns"); return it->second.type; }); return sorted_dtypes; }}, reader_opts.get_dtypes()); } else { CUDF_EXPECTS(not rec_starts.empty(), "No data available for data type inference.\n"); auto const num_columns = column_names.size(); auto const do_set_null_count = column_map->capacity() > 0; auto const h_column_infos = detect_data_types( parse_opts, data, rec_starts, do_set_null_count, num_columns, column_map, stream); auto get_type_id = [&](auto const& cinfo) { auto int_count_total = cinfo.big_int_count + cinfo.negative_small_int_count + cinfo.positive_small_int_count; if (cinfo.null_count == static_cast<int>(rec_starts.size())) { // Entire column is NULL; allocate the smallest amount of memory return type_id::INT8; 
} else if (cinfo.string_count > 0) { return type_id::STRING; } else if (cinfo.datetime_count > 0) { return type_id::TIMESTAMP_MILLISECONDS; } else if (cinfo.float_count > 0) { return type_id::FLOAT64; } else if (cinfo.big_int_count == 0 && int_count_total != 0) { return type_id::INT64; } else if (cinfo.big_int_count != 0 && cinfo.negative_small_int_count != 0) { return type_id::STRING; } else if (cinfo.big_int_count != 0) { return type_id::UINT64; } else if (cinfo.bool_count > 0) { return type_id::BOOL8; } else { CUDF_FAIL("Data type detection failed.\n"); } }; std::vector<data_type> dtypes; std::transform(std::cbegin(h_column_infos), std::cend(h_column_infos), std::back_inserter(dtypes), [&](auto const& cinfo) { return data_type{get_type_id(cinfo)}; }); return dtypes; } } table_with_metadata convert_data_to_table(parse_options_view const& parse_opts, std::vector<data_type> const& dtypes, std::vector<std::string>&& column_names, col_map_type* column_map, device_span<uint64_t const> rec_starts, device_span<char const> data, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto const num_columns = dtypes.size(); auto const num_records = rec_starts.size(); // alloc output buffers. std::vector<cudf::io::detail::column_buffer> out_buffers; for (size_t col = 0; col < num_columns; ++col) { out_buffers.emplace_back(dtypes[col], num_records, true, stream, mr); } thrust::host_vector<data_type> h_dtypes(num_columns); thrust::host_vector<void*> h_data(num_columns); thrust::host_vector<bitmask_type*> h_valid(num_columns); for (size_t i = 0; i < num_columns; ++i) { h_dtypes[i] = dtypes[i]; h_data[i] = out_buffers[i].data(); h_valid[i] = out_buffers[i].null_mask(); } auto d_dtypes = cudf::detail::make_device_uvector_async<data_type>( h_dtypes, stream, rmm::mr::get_current_device_resource()); auto d_data = cudf::detail::make_device_uvector_async<void*>( h_data, stream, rmm::mr::get_current_device_resource()); auto d_valid = cudf::detail::make_device_uvector_async<cudf::bitmask_type*>( h_valid, stream, rmm::mr::get_current_device_resource()); auto d_valid_counts = cudf::detail::make_zeroed_device_uvector_async<cudf::size_type>( num_columns, stream, rmm::mr::get_current_device_resource()); convert_json_to_columns( parse_opts, data, rec_starts, d_dtypes, column_map, d_data, d_valid, d_valid_counts, stream); stream.synchronize(); // postprocess columns auto target_chars = std::vector<char>{'\\', '"', '\\', '\\', '\\', 't', '\\', 'r', '\\', 'b'}; auto target_offsets = std::vector<size_type>{0, 2, 4, 6, 8, 10}; auto repl_chars = std::vector<char>{'"', '\\', '\t', '\r', '\b'}; auto repl_offsets = std::vector<size_type>{0, 1, 2, 3, 4, 5}; auto target = make_strings_column(cudf::detail::make_device_uvector_async( target_chars, stream, rmm::mr::get_current_device_resource()), cudf::detail::make_device_uvector_async( target_offsets, stream, rmm::mr::get_current_device_resource()), {}, 0, stream); auto repl = make_strings_column(cudf::detail::make_device_uvector_async( repl_chars, stream, rmm::mr::get_current_device_resource()), cudf::detail::make_device_uvector_async( repl_offsets, stream, rmm::mr::get_current_device_resource()), {}, 0, stream); auto const h_valid_counts = cudf::detail::make_std_vector_sync(d_valid_counts, stream); std::vector<std::unique_ptr<column>> out_columns; for (size_t i = 0; i < num_columns; ++i) { out_buffers[i].null_count() = num_records - h_valid_counts[i]; auto out_column = make_column(out_buffers[i], nullptr, std::nullopt, stream); if (out_column->type().id() == 
type_id::STRING) { // Need to remove escape character in case of '\"' and '\\' out_columns.emplace_back(cudf::strings::detail::replace( out_column->view(), target->view(), repl->view(), stream, mr)); } else { out_columns.emplace_back(std::move(out_column)); } } std::vector<column_name_info> column_infos; column_infos.reserve(column_names.size()); std::transform(std::make_move_iterator(column_names.begin()), std::make_move_iterator(column_names.end()), std::back_inserter(column_infos), [](auto const& col_name) { return column_name_info{col_name}; }); // This is to ensure the stream-ordered make_stream_column calls above complete before // the temporary std::vectors are destroyed on exit from this function. stream.synchronize(); CUDF_EXPECTS(!out_columns.empty(), "No columns created from json input"); return table_with_metadata{std::make_unique<table>(std::move(out_columns)), {column_infos}}; } /** * @brief Read an entire set or a subset of data from the source * * @param[in] options reader options with Number of bytes offset from the start, * Bytes to read; use `0` for all remaining data * @param[in] stream CUDA stream used for device memory operations and kernel launches. * * @return Table and its metadata */ table_with_metadata read_json(host_span<std::unique_ptr<datasource>> sources, json_reader_options const& reader_opts, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_EXPECTS(not sources.empty(), "No sources were defined"); CUDF_EXPECTS(sources.size() == 1 or reader_opts.get_compression() == compression_type::NONE, "Multiple compressed inputs are not supported"); CUDF_EXPECTS(reader_opts.is_enabled_lines(), "Only JSON Lines format is currently supported.\n"); auto parse_opts = parse_options{',', '\n', '\"', '.'}; parse_opts.trie_true = cudf::detail::create_serialized_trie({"true"}, stream); parse_opts.trie_false = cudf::detail::create_serialized_trie({"false"}, stream); parse_opts.trie_na = cudf::detail::create_serialized_trie({"", "null"}, stream); parse_opts.dayfirst = reader_opts.is_enabled_dayfirst(); auto range_offset = reader_opts.get_byte_range_offset(); auto range_size = reader_opts.get_byte_range_size(); auto range_size_padded = reader_opts.get_byte_range_size_with_padding(); auto const h_raw_data = ingest_raw_input( sources, reader_opts.get_compression(), range_offset, range_size, range_size_padded); host_span<char const> h_data{reinterpret_cast<char const*>(h_raw_data.data()), h_raw_data.size()}; CUDF_EXPECTS(not h_data.empty(), "Ingest failed: uncompressed input data has zero size.\n"); auto d_data = rmm::device_uvector<char>(0, stream); if (should_load_whole_source(reader_opts)) { d_data = cudf::detail::make_device_uvector_async( h_data, stream, rmm::mr::get_current_device_resource()); } auto rec_starts = find_record_starts(reader_opts, h_data, d_data, stream); CUDF_EXPECTS(rec_starts.size() > 0, "Error enumerating records.\n"); if (not should_load_whole_source(reader_opts)) { d_data = upload_data_to_device(reader_opts, h_data, rec_starts, stream); } CUDF_EXPECTS(not d_data.is_empty(), "Error uploading input data to the GPU.\n"); auto column_names_and_map = get_column_names_and_map(parse_opts.view(), h_data, rec_starts, d_data, stream); auto column_names = std::get<0>(column_names_and_map); auto column_map = std::move(std::get<1>(column_names_and_map)); CUDF_EXPECTS(not column_names.empty(), "Error determining column names.\n"); auto dtypes = get_data_types( reader_opts, parse_opts.view(), column_names, column_map.get(), rec_starts, d_data, 
stream); CUDF_EXPECTS(not dtypes.empty(), "Error in data type detection.\n"); return convert_data_to_table(parse_opts.view(), dtypes, std::move(column_names), column_map.get(), rec_starts, d_data, stream, mr); } } // namespace cudf::io::json::detail::legacy
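The byte-range handling above (should_load_whole_source, find_record_starts, upload_data_to_device) is exercised through the reader options. A hedged sketch with illustrative offsets (file name and sizes are placeholders):

#include <cudf/io/json.hpp>

// Read roughly bytes [1024, 1024 + 4096) of a JSON Lines source.
cudf::io::table_with_metadata read_byte_range()
{
  auto opts = cudf::io::json_reader_options::builder(cudf::io::source_info{"big.jsonl"})
                .lines(true)
                .byte_range_offset(1024)
                .byte_range_size(4096)
                .build();
  // With a non-zero offset the reader skips the implicit first-record entry and
  // trims records that start past the requested range, as implemented above.
  return cudf::io::read_json(opts);
}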
0
rapidsai_public_repos/cudf/cpp/src/io/json
rapidsai_public_repos/cudf/cpp/src/io/json/legacy/json_gpu.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "json_gpu.hpp" #include <io/utilities/column_type_histogram.hpp> #include <io/utilities/parsing_utils.cuh> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/hashing/detail/murmurhash3_x86_32.cuh> #include <cudf/types.hpp> #include <cudf/utilities/bit.hpp> #include <cudf/utilities/span.hpp> #include <cudf/utilities/traits.hpp> #include <cudf/utilities/type_dispatcher.hpp> #include <io/utilities/trie.cuh> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_buffer.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/advance.h> #include <thrust/detail/copy.h> #include <thrust/execution_policy.h> #include <thrust/find.h> #include <thrust/generate.h> #include <thrust/iterator/reverse_iterator.h> #include <thrust/mismatch.h> #include <thrust/optional.h> #include <thrust/pair.h> using cudf::device_span; using cudf::detail::grid_1d; namespace cudf::io::json::detail::legacy { namespace { /** * @brief CUDA Kernel that adjusts the row range to exclude the character outside of the top level * brackets. * * The top level brackets characters are excluded from the resulting range. * * @param[in] begin Pointer to the first character in the row * @param[in] end pointer to the first character after the row */ __device__ std::pair<char const*, char const*> limit_range_to_brackets(char const* begin, char const* end) { auto const data_begin = thrust::next(thrust::find_if( thrust::seq, begin, end, [] __device__(auto c) { return c == '[' || c == '{'; })); auto const data_end = thrust::next(thrust::find_if(thrust::seq, thrust::make_reverse_iterator(end), thrust::make_reverse_iterator(data_begin), [](auto c) { return c == ']' || c == '}'; })) .base(); return {data_begin, data_end}; } /** * @brief Find the first JSON object key in the range. * * Assumes that begin is not in the middle of a field. * * @param[in] begin Pointer to the first character in the parsing range * @param[in] end pointer to the first character after the parsing range * @param[in] quotechar The character used to denote quotes * * @return Begin and end iterators of the key name; (`end`, `end`) if a key is not found */ __device__ std::pair<char const*, char const*> get_next_key(char const* begin, char const* end, char quotechar) { // Key starts after the first quote auto const key_begin = thrust::find(thrust::seq, begin, end, quotechar) + 1; if (key_begin > end) return {end, end}; // Key ends after the next unescaped quote auto const key_end_pair = thrust::mismatch( thrust::seq, key_begin, end - 1, key_begin + 1, [quotechar] __device__(auto prev_ch, auto ch) { return !(ch == quotechar && prev_ch != '\\'); }); return {key_begin, key_end_pair.second}; } /** * @brief Returns true is the input character is a valid digit. * Supports both decimal and hexadecimal digits (uppercase and lowercase). 
* * @param c Character to check * @param is_hex Whether to check as a hexadecimal * * @return `true` if it is digit-like, `false` otherwise */ __device__ __inline__ bool is_digit(char c, bool is_hex = false) { if (c >= '0' && c <= '9') return true; if (is_hex) { if (c >= 'A' && c <= 'F') return true; if (c >= 'a' && c <= 'f') return true; } return false; } /** * @brief Returns true if the counters indicate a potentially valid float. * False positives are possible because positions are not taken into account. * For example, field "e.123-" would match the pattern. */ __device__ __inline__ bool is_like_float( long len, long digit_cnt, long decimal_cnt, long dash_cnt, long exponent_cnt) { // Can't have more than one exponent and one decimal point if (decimal_cnt > 1) return false; if (exponent_cnt > 1) return false; // Without the exponent or a decimal point, this is an integer, not a float if (decimal_cnt == 0 && exponent_cnt == 0) return false; // Can only have one '-' per component if (dash_cnt > 1 + exponent_cnt) return false; // If anything other than these characters is present, it's not a float if (digit_cnt + decimal_cnt + dash_cnt + exponent_cnt != len) return false; // Needs at least 1 digit, 2 if exponent is present if (digit_cnt < 1 + exponent_cnt) return false; return true; } /** * @brief Contains information on a JSON file field. */ struct field_descriptor { cudf::size_type column; char const* value_begin; char const* value_end; bool is_quoted; }; /** * @brief Parse the first field in the given range and return its descriptor. * * @param[in] begin Pointer to the first character in the parsing range * @param[in] end pointer to the first character after the parsing range * @param[in] opts The global parsing behavior options * @param[in] field_idx Index of the current field in the input row * @param[in] col_map Pointer to the (column name hash -> column index) map in device memory. * nullptr is passed when the input file does not consist of objects. * @return Descriptor of the parsed field */ __device__ field_descriptor next_field_descriptor(char const* begin, char const* end, parse_options_view const& opts, cudf::size_type field_idx, col_map_type col_map) { auto const desc_pre_trim = col_map.capacity() == 0 // No key - column and begin are trivial ? field_descriptor{field_idx, begin, cudf::io::gpu::seek_field_end(begin, end, opts, true), false} : [&]() { auto const key_range = get_next_key(begin, end, opts.quotechar); auto const key_hash = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>{}( cudf::string_view(key_range.first, key_range.second - key_range.first)); auto const hash_col = col_map.find(key_hash); // Fall back to field index if not found (parsing error) auto const column = (hash_col != col_map.end()) ? 
(*hash_col).second : field_idx; // Skip the colon between the key and the value auto const value_begin = thrust::find(thrust::seq, key_range.second, end, ':') + 1; return field_descriptor{column, value_begin, cudf::io::gpu::seek_field_end(value_begin, end, opts, true), false}; }(); // Modify start & end to ignore whitespace and quotechars auto const trimmed_value_range = trim_whitespaces(desc_pre_trim.value_begin, desc_pre_trim.value_end); bool const is_quoted = thrust::distance(trimmed_value_range.first, trimmed_value_range.second) >= 2 and *trimmed_value_range.first == opts.quotechar and *thrust::prev(trimmed_value_range.second) == opts.quotechar; return {desc_pre_trim.column, trimmed_value_range.first + static_cast<std::ptrdiff_t>(is_quoted), trimmed_value_range.second - static_cast<std::ptrdiff_t>(is_quoted), is_quoted}; } /** * @brief Returns the range that contains the data in a given row. * * Excludes the top-level brackets. * * @param[in] data Device span pointing to the JSON data in device memory * @param[in] row_offsets The offset of each row in the input * @param[in] row Index of the row for which the range is returned * * @return The begin and end iterators of the row data. */ __device__ std::pair<char const*, char const*> get_row_data_range( device_span<char const> const data, device_span<uint64_t const> const row_offsets, size_type row) { auto const row_begin = data.begin() + row_offsets[row]; auto const row_end = data.begin() + ((row < row_offsets.size() - 1) ? row_offsets[row + 1] : data.size()); return limit_range_to_brackets(row_begin, row_end); } /** * @brief CUDA kernel that parses and converts plain text data into cuDF column data. * * Data is processed one record at a time * * @param[in] opts A set of parsing options * @param[in] data The entire data to read * @param[in] row_offsets The offset of each row in the input * @param[in] column_types The data type of each column * @param[in] col_map Pointer to the (column name hash -> column index) map in device memory. * nullptr is passed when the input file does not consist of objects. 
* @param[out] output_columns The output column data * @param[out] valid_fields The bitmaps indicating whether column fields are valid * @param[out] num_valid_fields The numbers of valid fields in columns */ __global__ void convert_data_to_columns_kernel(parse_options_view opts, device_span<char const> const data, device_span<uint64_t const> const row_offsets, device_span<data_type const> const column_types, col_map_type col_map, device_span<void* const> const output_columns, device_span<bitmask_type* const> const valid_fields, device_span<cudf::size_type> const num_valid_fields) { auto const rec_id = grid_1d::global_thread_id(); if (rec_id >= row_offsets.size()) return; auto const row_data_range = get_row_data_range(data, row_offsets, rec_id); auto current = row_data_range.first; for (size_type input_field_index = 0; input_field_index < column_types.size() && current < row_data_range.second; input_field_index++) { auto const desc = next_field_descriptor(current, row_data_range.second, opts, input_field_index, col_map); auto const value_len = static_cast<size_t>(std::max(desc.value_end - desc.value_begin, 0L)); auto const is_quoted = static_cast<std::ptrdiff_t>(desc.is_quoted); current = desc.value_end + 1; using string_index_pair = thrust::pair<char const*, size_type>; if (!serialized_trie_contains(opts.trie_na, {desc.value_begin - is_quoted, value_len + is_quoted * 2})) { // Type dispatcher does not handle strings if (column_types[desc.column].id() == type_id::STRING) { auto str_list = static_cast<string_index_pair*>(output_columns[desc.column]); str_list[rec_id].first = desc.value_begin; str_list[rec_id].second = value_len; // set the valid bitmap - all bits were set to 0 to start set_bit(valid_fields[desc.column], rec_id); atomicAdd(&num_valid_fields[desc.column], 1); } else { if (cudf::type_dispatcher(column_types[desc.column], ConvertFunctor{}, desc.value_begin, desc.value_end, output_columns[desc.column], rec_id, column_types[desc.column], opts, false)) { // set the valid bitmap - all bits were set to 0 to start set_bit(valid_fields[desc.column], rec_id); atomicAdd(&num_valid_fields[desc.column], 1); } } } else if (column_types[desc.column].id() == type_id::STRING) { auto str_list = static_cast<string_index_pair*>(output_columns[desc.column]); str_list[rec_id].first = nullptr; str_list[rec_id].second = 0; } } } /** * @brief CUDA kernel that processes a buffer of data and determines information about the * column types within. * * Data is processed in one row/record at a time, so the number of total * threads (tid) is equal to the number of rows. * * @param[in] opts A set of parsing options * @param[in] data Input data buffer * @param[in] rec_starts The offset of each row in the input * @param[in] col_map Pointer to the (column name hash -> column index) map in device memory. * nullptr is passed when the input file does not consist of objects. 
* @param[in] num_columns The number of columns of input data * @param[out] column_infos The count for each column data type */ __global__ void detect_data_types_kernel( parse_options_view const opts, device_span<char const> const data, device_span<uint64_t const> const row_offsets, col_map_type col_map, int num_columns, device_span<cudf::io::column_type_histogram> const column_infos) { auto const rec_id = grid_1d::global_thread_id(); if (rec_id >= row_offsets.size()) return; auto const are_rows_objects = col_map.capacity() != 0; auto const row_data_range = get_row_data_range(data, row_offsets, rec_id); size_type input_field_index = 0; for (auto current = row_data_range.first; input_field_index < num_columns && current < row_data_range.second; input_field_index++) { auto const desc = next_field_descriptor(current, row_data_range.second, opts, input_field_index, col_map); auto const value_len = static_cast<size_t>(std::max(desc.value_end - desc.value_begin, 0L)); // Advance to the next field; +1 to skip the delimiter current = desc.value_end + 1; // Checking if the field is empty/valid if (serialized_trie_contains(opts.trie_na, {desc.value_begin, value_len})) { // Increase the null count for array rows, where the null count is initialized to zero. if (!are_rows_objects) { atomicAdd(&column_infos[desc.column].null_count, 1); } continue; } else if (are_rows_objects) { // For files with object rows, null count is initialized to row count. The value is decreased // here for every valid field. atomicAdd(&column_infos[desc.column].null_count, -1); } // Don't need counts to detect strings, any field in quotes is deduced to be a string if (desc.is_quoted) { atomicAdd(&column_infos[desc.column].string_count, 1); continue; } int digit_count = 0; int decimal_count = 0; int slash_count = 0; int dash_count = 0; int plus_count = 0; int colon_count = 0; int exponent_count = 0; int other_count = 0; bool const maybe_hex = ((value_len > 2 && *desc.value_begin == '0' && *(desc.value_begin + 1) == 'x') || (value_len > 3 && *desc.value_begin == '-' && *(desc.value_begin + 1) == '0' && *(desc.value_begin + 2) == 'x')); for (auto pos = desc.value_begin; pos < desc.value_end; ++pos) { if (is_digit(*pos, maybe_hex)) { digit_count++; continue; } // Looking for unique characters that will help identify column types switch (*pos) { case '.': decimal_count++; break; case '-': dash_count++; break; case '+': plus_count++; break; case '/': slash_count++; break; case ':': colon_count++; break; case 'e': case 'E': if (!maybe_hex && pos > desc.value_begin && pos < desc.value_end - 1) exponent_count++; break; default: other_count++; break; } } // Integers have to have the length of the string int int_req_number_cnt = value_len; // Off by one if they start with a minus sign if ((*desc.value_begin == '-' || *desc.value_begin == '+') && value_len > 1) { --int_req_number_cnt; } // Off by one if they are a hexadecimal number if (maybe_hex) { --int_req_number_cnt; } if (serialized_trie_contains(opts.trie_true, {desc.value_begin, value_len}) || serialized_trie_contains(opts.trie_false, {desc.value_begin, value_len})) { atomicAdd(&column_infos[desc.column].bool_count, 1); } else if (digit_count == int_req_number_cnt) { bool is_negative = (*desc.value_begin == '-'); char const* data_begin = desc.value_begin + (is_negative || (*desc.value_begin == '+')); cudf::size_type* ptr = cudf::io::gpu::infer_integral_field_counter( data_begin, data_begin + digit_count, is_negative, column_infos[desc.column]); atomicAdd(ptr, 1); } else if 
(is_like_float( value_len, digit_count, decimal_count, dash_count + plus_count, exponent_count)) { atomicAdd(&column_infos[desc.column].float_count, 1); } // A date-time field cannot have more than 3 non-special characters // A number field cannot have more than one decimal point else if (other_count > 3 || decimal_count > 1) { atomicAdd(&column_infos[desc.column].string_count, 1); } else { // A date field can have either one or two '-' or '/' separators; a legal combination will // only have one of them. To simplify auto column detection, we do not cover all the // date-time format permutations if ((dash_count > 0 && dash_count <= 2 && slash_count == 0) || (dash_count == 0 && slash_count > 0 && slash_count <= 2)) { if (colon_count <= 2) { atomicAdd(&column_infos[desc.column].datetime_count, 1); } else { atomicAdd(&column_infos[desc.column].string_count, 1); } } else { // Default field type is string atomicAdd(&column_infos[desc.column].string_count, 1); } } } if (!are_rows_objects) { // For array rows, mark missing fields as null for (; input_field_index < num_columns; ++input_field_index) atomicAdd(&column_infos[input_field_index].null_count, 1); } } /** * @brief Input data range that contains a field in key:value format. */ struct key_value_range { char const* key_begin; char const* key_end; char const* value_begin; char const* value_end; }; /** * @brief Parse the next field in key:value format and return ranges of its parts. */ __device__ key_value_range get_next_key_value_range(char const* begin, char const* end, parse_options_view const& opts) { auto const key_range = get_next_key(begin, end, opts.quotechar); // Colon between the key and the value auto const colon = thrust::find(thrust::seq, key_range.second, end, ':'); if (colon == end) return {end, end, end, end}; // Field value (including delimiters) auto const value_end = cudf::io::gpu::seek_field_end(colon + 1, end, opts, true); return {key_range.first, key_range.second, colon + 1, value_end}; } /** * @brief CUDA kernel that collects information about JSON object keys in the file. 
* * @param[in] options A set of parsing options * @param[in] data Input data buffer * @param[in] row_offsets The offset of each row in the input * @param[out] keys_cnt Number of keys found in the file * @param[out] keys_info Optional; information (offset, length, hash) for each found key */ __global__ void collect_keys_info_kernel(parse_options_view const options, device_span<char const> const data, device_span<uint64_t const> const row_offsets, unsigned long long int* keys_cnt, thrust::optional<mutable_table_device_view> keys_info) { auto const rec_id = grid_1d::global_thread_id(); if (rec_id >= row_offsets.size()) return; auto const row_data_range = get_row_data_range(data, row_offsets, rec_id); auto advance = [&](char const* begin) { return get_next_key_value_range(begin, row_data_range.second, options); }; for (auto field_range = advance(row_data_range.first); field_range.key_begin < row_data_range.second; field_range = advance(field_range.value_end)) { auto const idx = atomicAdd(keys_cnt, 1); if (keys_info.has_value()) { auto const len = field_range.key_end - field_range.key_begin; keys_info->column(0).element<uint64_t>(idx) = field_range.key_begin - data.begin(); keys_info->column(1).element<uint16_t>(idx) = len; keys_info->column(2).element<uint32_t>(idx) = cudf::hashing::detail::MurmurHash3_x86_32<cudf::string_view>{}( cudf::string_view(field_range.key_begin, len)); } } } } // namespace /** * @copydoc cudf::io::json::detail::legacy::convert_json_to_columns */ void convert_json_to_columns(parse_options_view const& opts, device_span<char const> const data, device_span<uint64_t const> const row_offsets, device_span<data_type const> const column_types, col_map_type* col_map, device_span<void* const> const output_columns, device_span<bitmask_type* const> const valid_fields, device_span<cudf::size_type> num_valid_fields, rmm::cuda_stream_view stream) { int block_size; int min_grid_size; CUDF_CUDA_TRY(cudaOccupancyMaxPotentialBlockSize( &min_grid_size, &block_size, convert_data_to_columns_kernel)); int const grid_size = (row_offsets.size() + block_size - 1) / block_size; convert_data_to_columns_kernel<<<grid_size, block_size, 0, stream.value()>>>(opts, data, row_offsets, column_types, *col_map, output_columns, valid_fields, num_valid_fields); CUDF_CHECK_CUDA(stream.value()); } /** * @copydoc cudf::io::json::detail::legacy::detect_data_types */ std::vector<cudf::io::column_type_histogram> detect_data_types( parse_options_view const& options, device_span<char const> const data, device_span<uint64_t const> const row_offsets, bool do_set_null_count, int num_columns, col_map_type* col_map, rmm::cuda_stream_view stream) { int block_size; int min_grid_size; CUDF_CUDA_TRY( cudaOccupancyMaxPotentialBlockSize(&min_grid_size, &block_size, detect_data_types_kernel)); auto d_column_infos = [&]() { if (do_set_null_count) { rmm::device_uvector<cudf::io::column_type_histogram> d_column_infos(num_columns, stream); // Set the null count to the row count (all fields are assumed to be null). 
thrust::generate( rmm::exec_policy(stream), d_column_infos.begin(), d_column_infos.end(), [num_records = static_cast<cudf::size_type>(row_offsets.size())] __device__() { return cudf::io::column_type_histogram{num_records}; }); return d_column_infos; } else { return cudf::detail::make_zeroed_device_uvector_async<cudf::io::column_type_histogram>( num_columns, stream, rmm::mr::get_current_device_resource()); } }(); // Calculate actual block count to use based on the record count int const grid_size = (row_offsets.size() + block_size - 1) / block_size; detect_data_types_kernel<<<grid_size, block_size, 0, stream.value()>>>( options, data, row_offsets, *col_map, num_columns, d_column_infos); return cudf::detail::make_std_vector_sync(d_column_infos, stream); } /** * @copydoc cudf::io::json::detail::legacy::collect_keys_info */ void collect_keys_info(parse_options_view const& options, device_span<char const> const data, device_span<uint64_t const> const row_offsets, unsigned long long int* keys_cnt, thrust::optional<mutable_table_device_view> keys_info, rmm::cuda_stream_view stream) { int block_size; int min_grid_size; CUDF_CUDA_TRY( cudaOccupancyMaxPotentialBlockSize(&min_grid_size, &block_size, collect_keys_info_kernel)); // Calculate actual block count to use based on the record count int const grid_size = (row_offsets.size() + block_size - 1) / block_size; collect_keys_info_kernel<<<grid_size, block_size, 0, stream.value()>>>( options, data, row_offsets, keys_cnt, keys_info); CUDF_CHECK_CUDA(stream.value()); } } // namespace cudf::io::json::detail::legacy
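All three host wrappers above follow the same launch recipe: ask the CUDA runtime for an occupancy-optimal block size with cudaOccupancyMaxPotentialBlockSize, then round the grid up so each input row gets exactly one thread. A minimal standalone sketch of that recipe follows; row_kernel, d_out, and num_rows are illustrative names, not part of the cudf sources.

#include <cuda_runtime.h>

__global__ void row_kernel(int* out, int num_rows)
{
  auto const tid = static_cast<int>(blockIdx.x * blockDim.x + threadIdx.x);
  if (tid >= num_rows) { return; }  // the grid is rounded up, so guard the tail threads
  out[tid] = tid;
}

void launch_one_thread_per_row(int* d_out, int num_rows, cudaStream_t stream)
{
  int block_size    = 0;
  int min_grid_size = 0;  // suggested minimum grid size; unused here, as in the wrappers above
  cudaOccupancyMaxPotentialBlockSize(&min_grid_size, &block_size, row_kernel);
  int const grid_size = (num_rows + block_size - 1) / block_size;  // ceiling division
  row_kernel<<<grid_size, block_size, 0, stream>>>(d_out, num_rows);
}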
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/csv_common.hpp
/* * Copyright (c) 2019-2022, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cstdint> #include <io/utilities/column_type_histogram.hpp> namespace cudf { namespace io { namespace csv { namespace column_parse { /** * @brief Per-column parsing flags used for dtype detection and data conversion */ enum : uint8_t { disabled = 0, ///< data is not read enabled = 1, ///< data is read and parsed as usual inferred = 2, ///< infer the dtype as_default = 4, ///< no special decoding as_hexadecimal = 8, ///< decode with base-16 as_datetime = 16, ///< decode as date and/or time }; using flags = uint8_t; } // namespace column_parse } // namespace csv } // namespace io } // namespace cudf
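Because the enumerators above are single bits OR-ed into one flags byte per column, composing and testing them is plain bit arithmetic. A hedged sketch of how a caller might do so (the helper names are hypothetical, for illustration only):

#include <cstdint>

using cudf::io::csv::column_parse::flags;

// Hypothetical helper: flags for a column that is read, has its dtype
// inferred, and whose values should be decoded as base-16.
inline flags hex_inferred_column()
{
  return static_cast<flags>(cudf::io::csv::column_parse::enabled |
                            cudf::io::csv::column_parse::inferred |
                            cudf::io::csv::column_parse::as_hexadecimal);
}

// Hypothetical helper: test a single flag bit.
inline bool wants_hex(flags f)
{
  return (f & cudf::io::csv::column_parse::as_hexadecimal) != 0;
}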
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/durations.hpp
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/mr/device/per_device_resource.hpp> #include <memory> namespace cudf { namespace io { namespace detail { namespace csv { std::unique_ptr<column> pandas_format_durations(column_view const& durations, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); } // namespace csv } // namespace detail } // namespace io } // namespace cudf
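A hedged usage sketch for the declaration above, assuming a translation unit that may include cudf detail headers; durations stands for any duration-typed input column:

#include <cudf/column/column_view.hpp>
#include <cudf/utilities/default_stream.hpp>
#include <rmm/mr/device/per_device_resource.hpp>
#include <memory>

std::unique_ptr<cudf::column> durations_to_pandas_strings(cudf::column_view const& durations)
{
  // Produces a STRING column with pandas-style timedelta text
  // (e.g. "0 days 00:00:01") rather than a raw tick count.
  return cudf::io::detail::csv::pandas_format_durations(
    durations, cudf::get_default_stream(), rmm::mr::get_current_device_resource());
}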
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/csv_gpu.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <io/utilities/parsing_utils.cuh> #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> using cudf::device_span; namespace cudf { namespace io { namespace csv { namespace gpu { /** * CSV row parsing context * NONE: No special context (normal parsing) * QUOTE: Within a quoted field * COMMENT: Within a comment line (discard every character until terminator) * EOF: End state (EOF reached) */ enum { ROW_CTX_NONE = 0, ROW_CTX_QUOTE = 1, ROW_CTX_COMMENT = 2, ROW_CTX_EOF = 3 }; constexpr uint32_t rowofs_block_dim = 512; /// Character block size for gather_row_offsets constexpr uint32_t rowofs_block_bytes = rowofs_block_dim * 32; // 16KB/threadblock /** * Row parsing context with row count * Format: row_count * 4 + id, where `row_count` is the number of rows * in a character block, and `id` is the row parser state at the end of the block. */ using rowctx32_t = uint32_t; using rowctx64_t = uint64_t; /** * Packed row context format * * The 64-bit packed row context format represents the four possible output row context states * from each of the four possible input row context states. * Each rowctx32_t value is truncated to 20 bits (limiting the max number of rows * to 18 bits) and concatenated to form an 80-bit value, whose upper 16 bits are * always zero (EOF input state implies a zero row count) and therefore * stored as 64-bit. */ using packed_rowctx_t = uint64_t; /** * @brief Return a row context from a {count, id} pair * * The 32-bit row context consists of the 2-bit parser state stored in the lower 2 bits * and a 30-bit row count in the upper 30 bits. */ inline __host__ __device__ rowctx32_t make_row_context(uint32_t row_count, uint32_t out_ctx) { return (row_count << 2) + out_ctx; } /** * @brief Pack multiple row contexts together * * Pack four rowctx32_t values, where each value represents the output row context * for one of four possible input contexts when parsing a character block. 
* Each output state consists of the 2-bit row context state along with an 18-bit row count * value (the row count is assumed to be a local count that fits in 18 bits). * The four 20-bit values are concatenated to form an 80-bit value, truncated to 64 bits * since a block starting in an EOF state can only have a zero row count (and the output * state corresponding to an EOF input state can only be EOF, so only the first 3 output * states are included as parameters, and the EOF->EOF state transition is hardcoded) */ constexpr __host__ __device__ packed_rowctx_t pack_row_contexts(rowctx32_t ctx0, rowctx32_t ctx1, rowctx32_t ctx2) { return (ctx0) | (static_cast<uint64_t>(ctx1) << 20) | (static_cast<uint64_t>(ctx2) << 40) | (static_cast<uint64_t>(ROW_CTX_EOF) << 60); } /** * @brief Unpack a row context (select one of the 4 contexts in packed form) */ inline __host__ __device__ rowctx32_t get_row_context(packed_rowctx_t packed_ctx, uint32_t ctxid) { return static_cast<rowctx32_t>((packed_ctx >> (ctxid * 20)) & ((1 << 20) - 1)); } /** * @brief Select the output row context from a given input context and a packed row * context corresponding to a block of characters, and return the new output context with * updated total row count. * The input context is a 64-bit version of the 32-bit single row context as returned * by make_row_context(), so the maximum row count here is a 62-bit value. * * @param sel_ctx input context (2-bit context id, 62-bit row count) * @param packed_ctx row context of character block * @return total_row_count * 4 + output context id */ inline __host__ __device__ rowctx64_t select_row_context(rowctx64_t sel_ctx, packed_rowctx_t packed_ctx) { auto ctxid = static_cast<uint32_t>(sel_ctx & 3); rowctx32_t ctx = get_row_context(packed_ctx, ctxid); return (sel_ctx & ~3) + ctx; } /** * @brief Launches kernel to gather row offsets * * This is done in two phases: the first phase returns the possible row counts * per 16K character block for each possible parsing context at the start of the block, * along with the resulting parsing context at the end of the block. * The caller can then compute the actual parsing context at the beginning of each * individual block and total row count. * The second phase outputs the location of each row in the block, using the parsing * context and initial row counter resulting from the previous phase. * Row parsing context will be updated after phase 2 such that the value contains * the number of rows starting at byte_range_end or beyond. * * @param options Options that control parsing of individual fields * @param row_ctx Row parsing context (output of phase 1 or input to phase 2) * @param offsets_out Row offsets (nullptr for phase 1, non-null indicates phase 2) * @param data Base pointer of character data (all row offsets are relative to this) * @param chunk_size Total number of characters to parse * @param parse_pos Current parsing position in the file * @param start_offset Position of the start of the character buffer in the file * @param data_size CSV file size * @param byte_range_start Ignore rows starting before this position in the file * @param byte_range_end In phase 2, store the number of rows beyond range in row_ctx * @param skip_rows Number of rows to skip (ignored in phase 1) * @param stream CUDA stream used for device memory operations and kernel launches. 
* * @return Number of row contexts */ uint32_t gather_row_offsets(cudf::io::parse_options_view const& options, uint64_t* row_ctx, device_span<uint64_t> offsets_out, device_span<char const> data, size_t chunk_size, size_t parse_pos, size_t start_offset, size_t data_size, size_t byte_range_start, size_t byte_range_end, size_t skip_rows, rmm::cuda_stream_view stream); /** * Count the number of blank rows in the given row offset array * * @param options Options that control parsing of individual fields * @param data Character data buffer * @param row_offsets Row offsets in the character data buffer * @param stream CUDA stream used for device memory operations and kernel launches. */ size_t count_blank_rows(cudf::io::parse_options_view const& options, device_span<char const> data, device_span<uint64_t const> row_offsets, rmm::cuda_stream_view stream); /** * Remove blank rows in the given row offset array * * @param options Options that control parsing of individual fields * @param data Character data buffer * @param row_offsets Row offsets in the character data buffer * @param stream CUDA stream used for device memory operations and kernel launches. */ device_span<uint64_t> remove_blank_rows(cudf::io::parse_options_view const& options, device_span<char const> data, device_span<uint64_t> row_offsets, rmm::cuda_stream_view stream); /** * @brief Launches kernel for detecting possible dtype of each column of data * * @param[in] options Options that control individual field data conversion * @param[in] data The row-column data * @param[in] column_flags Flags that control individual column parsing * @param[in] row_offsets List of row data start positions (offsets) * @param[in] stream CUDA stream to use * * @return stats Histogram of each dtypes' occurrence for each column */ std::vector<column_type_histogram> detect_column_types( cudf::io::parse_options_view const& options, device_span<char const> data, device_span<column_parse::flags const> column_flags, device_span<uint64_t const> row_offsets, size_t const num_active_columns, rmm::cuda_stream_view stream); /** * @brief Launches kernel for decoding row-column data * * @param[in] options Options that control individual field data conversion * @param[in] data The row-column data * @param[in] column_flags Flags that control individual column parsing * @param[in] row_offsets List of row data start positions (offsets) * @param[in] dtypes List of dtype corresponding to each column * @param[out] columns Device memory output of column data * @param[out] valids Device memory output of column valids bitmap data * @param[out] valid_counts Device memory output of the number of valid fields in each column * @param[in] stream CUDA stream to use */ void decode_row_column_data(cudf::io::parse_options_view const& options, device_span<char const> data, device_span<column_parse::flags const> column_flags, device_span<uint64_t const> row_offsets, device_span<cudf::data_type const> dtypes, device_span<void* const> columns, device_span<cudf::bitmask_type* const> valids, device_span<size_type> valid_counts, rmm::cuda_stream_view stream); } // namespace gpu } // namespace csv } // namespace io } // namespace cudf
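The 20-bit packing defined above is easiest to verify with a small host-side round trip. A sketch, assuming a translation unit compiled by nvcc that includes this header: three {row_count, out_ctx} pairs are packed into one 64-bit value, and get_row_context() recovers each one by input context id.

#include <cassert>

void packed_rowctx_roundtrip()
{
  using namespace cudf::io::csv::gpu;
  auto const c_none    = make_row_context(5, ROW_CTX_QUOTE);  // 5 rows, block ends inside a quote
  auto const c_quote   = make_row_context(0, ROW_CTX_QUOTE);  // still inside a quoted field
  auto const c_comment = make_row_context(1, ROW_CTX_NONE);   // 1 row, back to the neutral state
  packed_rowctx_t const packed = pack_row_contexts(c_none, c_quote, c_comment);

  assert(get_row_context(packed, ROW_CTX_NONE) == c_none);
  assert(get_row_context(packed, ROW_CTX_QUOTE) == c_quote);
  assert(get_row_context(packed, ROW_CTX_COMMENT) == c_comment);
  // The fourth slot is hardcoded by pack_row_contexts: an EOF input yields
  // zero rows and stays in the EOF state.
  assert(get_row_context(packed, ROW_CTX_EOF) == make_row_context(0, ROW_CTX_EOF));
}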
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/writer_impl.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file writer_impl.cu * @brief cuDF-IO CSV writer class implementation */ #include "durations.hpp" #include "csv_common.hpp" #include "csv_gpu.hpp" #include <cudf/column/column_device_view.cuh> #include <cudf/detail/copy.hpp> #include <cudf/detail/null_mask.hpp> #include <cudf/io/data_sink.hpp> #include <cudf/io/detail/csv.hpp> #include <cudf/null_mask.hpp> #include <cudf/scalar/scalar.hpp> #include <cudf/strings/detail/combine.hpp> #include <cudf/strings/detail/converters.hpp> #include <cudf/strings/detail/replace.hpp> #include <cudf/strings/detail/strings_children.cuh> #include <cudf/strings/strings_column_view.hpp> #include <cudf/table/table.hpp> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <rmm/mr/device/per_device_resource.hpp> #include <thrust/execution_policy.h> #include <thrust/host_vector.h> #include <thrust/logical.h> #include <thrust/scan.h> #include <thrust/tabulate.h> #include <algorithm> #include <memory> #include <sstream> #include <string> #include <vector> namespace cudf { namespace io { namespace detail { namespace csv { using namespace cudf::io::csv; using namespace cudf::io; namespace { /** * @brief Functor to modify a string column for CSV format. * * If a row contains specific characters, the entire row must be * output in double-quotes. Also, if a double-quote appears it * must be escaped using a 2nd double-quote. */ struct escape_strings_fn { column_device_view const d_column; string_view const d_delimiter; // check for column delimiter size_type* d_offsets{}; char* d_chars{}; __device__ void write_char(char_utf8 chr, char*& d_buffer, size_type& bytes) { if (d_buffer) d_buffer += cudf::strings::detail::from_char_utf8(chr, d_buffer); else bytes += cudf::strings::detail::bytes_in_char_utf8(chr); } __device__ void operator()(size_type idx) { if (d_column.is_null(idx)) { if (!d_chars) d_offsets[idx] = 0; return; } constexpr char_utf8 const quote = '\"'; // check for quote constexpr char_utf8 const new_line = '\n'; // and for new-line auto const d_str = d_column.element<string_view>(idx); // if quote, new-line or a column delimiter appear in the string // the entire string must be double-quoted. bool const quote_row = thrust::any_of( thrust::seq, d_str.begin(), d_str.end(), [d_delimiter = d_delimiter](auto chr) { return chr == quote || chr == new_line || chr == d_delimiter[0]; }); char* d_buffer = d_chars ? 
d_chars + d_offsets[idx] : nullptr; size_type bytes = 0; if (quote_row) write_char(quote, d_buffer, bytes); for (auto chr : d_str) { if (chr == quote) write_char(quote, d_buffer, bytes); write_char(chr, d_buffer, bytes); } if (quote_row) write_char(quote, d_buffer, bytes); if (!d_chars) d_offsets[idx] = bytes; } }; struct column_to_strings_fn { // compile-time predicate that defines unsupported column types; // based on the conditions used for instantiations of individual // converters in strings/convert/convert_*.hpp; //(this should have been a `variable template`, // instead of a static function, but nvcc (10.0) // fails to compile var-templs); // template <typename column_type> constexpr static bool is_not_handled() { // Note: the case (not std::is_same_v<column_type, bool>) // is already covered by is_integral) // return not((std::is_same_v<column_type, cudf::string_view>) || (std::is_integral_v<column_type>) || (std::is_floating_point_v<column_type>) || (cudf::is_fixed_point<column_type>()) || (cudf::is_timestamp<column_type>()) || (cudf::is_duration<column_type>())); } explicit column_to_strings_fn(csv_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) : options_(options), stream_(stream), mr_(mr) { } ~column_to_strings_fn() = default; column_to_strings_fn(column_to_strings_fn const&) = delete; column_to_strings_fn& operator=(column_to_strings_fn const&) = delete; column_to_strings_fn(column_to_strings_fn&&) = delete; column_to_strings_fn& operator=(column_to_strings_fn&&) = delete; // Note: `null` replacement with `na_rep` deferred to `concatenate()` // instead of column-wise; might be faster // // Note: Cannot pass `stream` to detail::<fname> version of <fname> calls below, because they are // not exposed in header (see, for example, detail::concatenate(tbl_view, separator, na_rep, // stream, mr) is declared and defined in combine.cu); Possible solution: declare `extern`, or // just declare a prototype inside `namespace cudf::strings::detail`; // bools: // template <typename column_type> std::enable_if_t<std::is_same_v<column_type, bool>, std::unique_ptr<column>> operator()( column_view const& column) const { string_scalar true_string{options_.get_true_value(), true, stream_}; string_scalar false_string{options_.get_false_value(), true, stream_}; return cudf::strings::detail::from_booleans(column, true_string, false_string, stream_, mr_); } // strings: // template <typename column_type> std::enable_if_t<std::is_same_v<column_type, cudf::string_view>, std::unique_ptr<column>> operator()(column_view const& column_v) const { if (options_.get_quoting() == cudf::io::quote_style::NONE) { return std::make_unique<column>(column_v, stream_, mr_); } // handle special characters: {delimiter, '\n', "} in row: string_scalar delimiter{std::string{options_.get_inter_column_delimiter()}, true, stream_}; auto d_column = column_device_view::create(column_v, stream_); escape_strings_fn fn{*d_column, delimiter.value(stream_)}; auto children = cudf::strings::detail::make_strings_children(fn, column_v.size(), stream_, mr_); return make_strings_column(column_v.size(), std::move(children.first), std::move(children.second), column_v.null_count(), cudf::detail::copy_bitmask(column_v, stream_, mr_)); } // ints: // template <typename column_type> std::enable_if_t<std::is_integral_v<column_type> && !std::is_same_v<column_type, bool>, std::unique_ptr<column>> operator()(column_view const& column) const { return cudf::strings::detail::from_integers(column, stream_, 
mr_); } // floats: // template <typename column_type> std::enable_if_t<std::is_floating_point_v<column_type>, std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::strings::detail::from_floats(column, stream_, mr_); } // fixed point: // template <typename column_type> std::enable_if_t<cudf::is_fixed_point<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::strings::detail::from_fixed_point(column, stream_, mr_); } // timestamps: // template <typename column_type> std::enable_if_t<cudf::is_timestamp<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { std::string format = [&]() { if (std::is_same_v<cudf::timestamp_s, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%SZ"}; } else if (std::is_same_v<cudf::timestamp_ms, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%3fZ"}; } else if (std::is_same_v<cudf::timestamp_us, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%6fZ"}; } else if (std::is_same_v<cudf::timestamp_ns, column_type>) { return std::string{"%Y-%m-%dT%H:%M:%S.%9fZ"}; } else { return std::string{"%Y-%m-%d"}; } }(); // handle the cases where delimiter / line-terminator can be // "-" or ":", in which case we need to add quotes to the format // std::string delimiter{options_.get_inter_column_delimiter()}; std::string newline{options_.get_line_terminator()}; constexpr char const* dash{"-"}; constexpr char const* colon{":"}; if (delimiter == dash || newline == dash || delimiter == colon || newline == colon) { format = "\"" + format + "\""; } return cudf::strings::detail::from_timestamps( column, format, strings_column_view(make_empty_column(type_id::STRING)->view()), stream_, mr_); } template <typename column_type> std::enable_if_t<cudf::is_duration<column_type>(), std::unique_ptr<column>> operator()( column_view const& column) const { return cudf::io::detail::csv::pandas_format_durations(column, stream_, mr_); } // unsupported type of column: // template <typename column_type> std::enable_if_t<is_not_handled<column_type>(), std::unique_ptr<column>> operator()( column_view const&) const { CUDF_FAIL("Unsupported column type."); } private: csv_writer_options const& options_; rmm::cuda_stream_view stream_; rmm::mr::device_memory_resource* mr_; }; } // unnamed namespace // write the header: column names: // void write_chunked_begin(data_sink* out_sink, table_view const& table, host_span<std::string const> user_column_names, csv_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { if (options.is_enabled_include_header()) { // need to generate column names if names are not provided std::vector<std::string> generated_col_names; if (user_column_names.empty()) { generated_col_names.resize(table.num_columns()); thrust::tabulate(generated_col_names.begin(), generated_col_names.end(), [](auto idx) { return std::to_string(idx); }); } auto const& column_names = user_column_names.empty() ? 
generated_col_names : user_column_names; CUDF_EXPECTS(column_names.size() == static_cast<size_t>(table.num_columns()), "Mismatch between number of column headers and table columns."); auto const delimiter = options.get_inter_column_delimiter(); auto const terminator = options.get_line_terminator(); // process header names: // - if the header name includes the delimiter or terminator character, // it must be double-quoted // - if the header name includes a double-quote, it must be escaped // with a 2nd double-quote std::stringstream ss; std::transform(column_names.begin(), column_names.end(), std::ostream_iterator<std::string>(ss, std::string{delimiter}.c_str()), [delimiter, terminator](std::string name) { char const quote = '\"'; if (name.empty() || // no header name name.front() == quote) { // header already quoted return name; } // escape any quotes size_t pos = 0; while ((pos = name.find(quote, pos)) != name.npos) { name.insert(pos, 1, quote); pos += 2; } // check if overall quotes are required if (std::any_of(name.begin(), name.end(), [&](auto const chr) { return chr == quote || chr == delimiter || chr == terminator.front(); })) { name.insert(name.begin(), quote); name.insert(name.end(), quote); } return name; }); // add line terminator std::string header = ss.str(); if (!header.empty()) { header.erase(header.end() - 1); // remove extra final delimiter } header.append(terminator); out_sink->host_write(header.data(), header.size()); } } void write_chunked(data_sink* out_sink, strings_column_view const& str_column_view, csv_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // algorithm outline: // // for_each(strings_column.begin(), strings_column.end(), // [sink = out_sink](auto str_row) mutable { // auto host_buffer = str_row.host_buffer(); // sink->host_write(host_buffer_.data(), host_buffer_.size()); // });//or...sink->device_write(device_buffer,...); // // added line_terminator functionality // CUDF_EXPECTS(str_column_view.size() > 0, "Unexpected empty strings column."); cudf::string_scalar newline{options.get_line_terminator(), true, stream}; auto p_str_col_w_nl = cudf::strings::detail::join_strings(str_column_view, newline, string_scalar{"", false, stream}, stream, rmm::mr::get_current_device_resource()); strings_column_view strings_column{p_str_col_w_nl->view()}; auto total_num_bytes = strings_column.chars_size(); char const* ptr_all_bytes = strings_column.chars_begin(); if (out_sink->is_device_write_preferred(total_num_bytes)) { // Direct write from device memory out_sink->device_write(ptr_all_bytes, total_num_bytes, stream); } else { // copy the bytes to host to write them out thrust::host_vector<char> h_bytes(total_num_bytes); CUDF_CUDA_TRY(cudaMemcpyAsync(h_bytes.data(), ptr_all_bytes, total_num_bytes * sizeof(char), cudaMemcpyDefault, stream.value())); stream.synchronize(); out_sink->host_write(h_bytes.data(), total_num_bytes); } // Needs newline at the end, to separate from next chunk if (out_sink->is_device_write_preferred(newline.size())) { out_sink->device_write(newline.data(), newline.size(), stream); } else { out_sink->host_write(options.get_line_terminator().data(), options.get_line_terminator().size()); } } void write_csv(data_sink* out_sink, table_view const& table, host_span<std::string const> user_column_names, csv_writer_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // write header: column names separated by delimiter: // (even for tables with no rows) // 
write_chunked_begin(out_sink, table, user_column_names, options, stream, mr); if (table.num_rows() > 0) { // no need to check same-size columns constraint; auto-enforced by table_view auto n_rows_per_chunk = options.get_rows_per_chunk(); // // This outputs the CSV in row chunks to save memory. // Maybe we can use the total_rows*count calculation and a memory threshold // instead of an arbitrary chunk count. // The entire CSV chunk must fit in CPU memory before writing it out. // if (n_rows_per_chunk % 8) // must be divisible by 8 n_rows_per_chunk += 8 - (n_rows_per_chunk % 8); CUDF_EXPECTS(n_rows_per_chunk >= 8, "write_csv: invalid chunk_rows; must be at least 8"); auto num_rows = table.num_rows(); std::vector<table_view> vector_views; if (num_rows <= n_rows_per_chunk) { vector_views.push_back(table); } else { auto const n_chunks = num_rows / n_rows_per_chunk; std::vector<size_type> splits(n_chunks); thrust::tabulate(splits.begin(), splits.end(), [n_rows_per_chunk](auto idx) { return (idx + 1) * n_rows_per_chunk; }); // split table_view into chunks: vector_views = cudf::detail::split(table, splits, stream); } // convert each chunk to CSV: // column_to_strings_fn converter{options, stream, rmm::mr::get_current_device_resource()}; for (auto&& sub_view : vector_views) { // Skip if the table has no rows if (sub_view.num_rows() == 0) continue; std::vector<std::unique_ptr<column>> str_column_vec; // populate vector of string-converted columns: // std::transform( sub_view.begin(), sub_view.end(), std::back_inserter(str_column_vec), [&converter = std::as_const(converter)](auto const& current_col) { return cudf::type_dispatcher<cudf::id_to_type_impl, column_to_strings_fn const&>( current_col.type(), converter, current_col); }); // create string table view from str_column_vec: // auto str_table_ptr = std::make_unique<cudf::table>(std::move(str_column_vec)); auto str_table_view = str_table_ptr->view(); // concatenate columns in each row into one big string column // (using null representation and delimiter): // auto str_concat_col = [&] { cudf::string_scalar delimiter_str{ std::string{options.get_inter_column_delimiter()}, true, stream}; cudf::string_scalar options_narep{options.get_na_rep(), true, stream}; if (str_table_view.num_columns() > 1) return cudf::strings::detail::concatenate(str_table_view, delimiter_str, options_narep, strings::separator_on_nulls::YES, stream, rmm::mr::get_current_device_resource()); return cudf::strings::detail::replace_nulls( str_table_view.column(0), options_narep, stream, rmm::mr::get_current_device_resource()); }(); write_chunked(out_sink, str_concat_col->view(), options, stream, mr); } } } } // namespace csv } // namespace detail } // namespace io } // namespace cudf
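The header-name quoting rule applied in write_chunked_begin() is easiest to see in isolation. A standalone host-side sketch of the same rule (illustrative only; like the code above, it assumes a single-character delimiter and checks only the first character of the line terminator):

#include <algorithm>
#include <string>

std::string quote_header_name(std::string name, char delimiter, char terminator)
{
  char const quote = '"';
  if (name.empty() || name.front() == quote) { return name; }  // empty or already quoted
  // escape embedded quotes by doubling them
  for (std::string::size_type pos = 0; (pos = name.find(quote, pos)) != std::string::npos;
       pos += 2) {
    name.insert(pos, 1, quote);
  }
  // wrap the whole name in quotes if it contains any special character
  if (std::any_of(name.begin(), name.end(), [&](char chr) {
        return chr == quote || chr == delimiter || chr == terminator;
      })) {
    name.insert(name.begin(), quote);
    name.insert(name.end(), quote);
  }
  return name;
}

For example, quote_header_name("a,b", ',', '\n') yields "\"a,b\"", matching what the writer emits for such a header.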
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/csv_gpu.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "csv_common.hpp" #include "csv_gpu.hpp" #include <io/utilities/block_utils.cuh> #include <io/utilities/parsing_utils.cuh> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/fixed_point/fixed_point.hpp> #include <cudf/null_mask.hpp> #include <cudf/strings/detail/convert/fixed_point.cuh> #include <cudf/strings/string_view.cuh> #include <cudf/utilities/bit.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <cudf/utilities/traits.hpp> #include <cudf/utilities/type_dispatcher.hpp> #include <io/utilities/trie.cuh> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/count.h> #include <thrust/detail/copy.h> #include <thrust/remove.h> #include <thrust/transform.h> #include <type_traits> using namespace ::cudf::io; using cudf::device_span; using cudf::detail::grid_1d; namespace cudf { namespace io { namespace csv { namespace gpu { /// Block dimension for dtype detection and conversion kernels constexpr uint32_t csvparse_block_dim = 128; /* * @brief Returns true if the input character is a valid digit. * Supports both decimal and hexadecimal digits (uppercase and lowercase). * * @param c Character to check * @param is_hex Whether to check as hexadecimal * * @return `true` if it is digit-like, `false` otherwise */ __device__ __inline__ bool is_digit(char c, bool is_hex = false) { if (c >= '0' && c <= '9') return true; if (is_hex) { if (c >= 'A' && c <= 'F') return true; if (c >= 'a' && c <= 'f') return true; } return false; } /* * @brief Checks whether the given character counters indicate a potentially * valid date and/or time field. * * For performance and simplicity, we detect only the most common date * formats. Example formats that are detectable: * * `2001/02/30` * `2001-02-30 00:00:00` * `2/30/2001 T04:05:60.7` * `2 / 1 / 2011` * `02/January` * * @param len Number of non special-symbol or numeric characters * @param decimal_count Number of '.' characters * @param colon_count Number of ':' characters * @param dash_count Number of '-' characters * @param slash_count Number of '/' characters * * @return `true` if it is date-like, `false` otherwise */ __device__ __inline__ bool is_datetime( long len, long decimal_count, long colon_count, long dash_count, long slash_count) { // Must not exceed count of longest month (September) plus `T` time indicator if (len > 10) { return false; } // Must not have more than one decimal point or more than two time separators if (decimal_count > 1 || colon_count > 2) { return false; } // Must have one or two '-' or '/' but not both as date separators if ((dash_count > 0 && dash_count < 3 && slash_count == 0) || (dash_count == 0 && slash_count > 0 && slash_count < 3)) { return true; } return false; } /* * @brief Returns true if the counters indicate a potentially valid float. * False positives are possible because positions are not taken into account. 
* For example, field "e.123-" would match the pattern. * * @param len Number of non special-symbol or numeric characters * @param digit_count Number of digits characters * @param decimal_count Number of occurrences of the decimal point character * @param thousands_count Number of occurrences of the thousands separator character * @param dash_count Number of '-' characters * @param exponent_count Number of 'e or E' characters * * @return `true` if it is floating point-like, `false` otherwise */ __device__ __inline__ bool is_floatingpoint(long len, long digit_count, long decimal_count, long thousands_count, long dash_count, long exponent_count) { // Can't have more than one exponent and one decimal point if (decimal_count > 1) return false; if (exponent_count > 1) return false; // Without the exponent or a decimal point, this is an integer, not a float if (decimal_count == 0 && exponent_count == 0) return false; // Can only have one '-' per component if (dash_count > 1 + exponent_count) return false; // If anything other than these characters is present, it's not a float if (digit_count + decimal_count + dash_count + exponent_count + thousands_count != len) { return false; } // Needs at least 1 digit, 2 if exponent is present if (digit_count < 1 + exponent_count) return false; return true; } /* * @brief CUDA kernel that parses and converts CSV data into cuDF column data. * * Data is processed in one row/record at a time, so the number of total * threads (tid) is equal to the number of rows. * * @param opts A set of parsing options * @param csv_text The entire CSV data to read * @param column_flags Per-column parsing behavior flags * @param row_offsets The start the CSV data of interest * @param d_column_data The count for each column data type */ __global__ void __launch_bounds__(csvparse_block_dim) data_type_detection(parse_options_view const opts, device_span<char const> csv_text, device_span<column_parse::flags const> const column_flags, device_span<uint64_t const> const row_offsets, device_span<column_type_histogram> d_column_data) { auto const raw_csv = csv_text.data(); // ThreadIds range per block, so also need the blockId // This is entry into the fields; threadId is an element within `num_records` auto const rec_id = grid_1d::global_thread_id(); auto const rec_id_next = rec_id + 1; // we can have more threads than data, make sure we are not past the end of the data if (rec_id_next >= row_offsets.size()) { return; } auto field_start = raw_csv + row_offsets[rec_id]; auto const row_end = raw_csv + row_offsets[rec_id_next]; auto next_field = field_start; int col = 0; int actual_col = 0; // Going through all the columns of a given record while (col < column_flags.size() && field_start < row_end) { auto next_delimiter = cudf::io::gpu::seek_field_end(field_start, row_end, opts); // Checking if this is a column that the user wants --- user can filter columns if (column_flags[col] & column_parse::inferred) { // points to last character in the field auto const field_len = static_cast<size_t>(next_delimiter - field_start); if (serialized_trie_contains(opts.trie_na, {field_start, field_len})) { atomicAdd(&d_column_data[actual_col].null_count, 1); } else if (serialized_trie_contains(opts.trie_true, {field_start, field_len}) || serialized_trie_contains(opts.trie_false, {field_start, field_len})) { atomicAdd(&d_column_data[actual_col].bool_count, 1); } else if (cudf::io::is_infinity(field_start, next_delimiter)) { atomicAdd(&d_column_data[actual_col].float_count, 1); } else { long count_number = 0; 
long count_decimal = 0; long count_thousands = 0; long count_slash = 0; long count_dash = 0; long count_plus = 0; long count_colon = 0; long count_string = 0; long count_exponent = 0; // Modify field_start & end to ignore whitespace and quotechars // This could possibly result in additional empty fields auto const trimmed_field_range = trim_whitespaces_quotes(field_start, next_delimiter); auto const trimmed_field_len = trimmed_field_range.second - trimmed_field_range.first; for (auto cur = trimmed_field_range.first; cur < trimmed_field_range.second; ++cur) { if (is_digit(*cur)) { count_number++; continue; } if (*cur == opts.decimal) { count_decimal++; continue; } if (*cur == opts.thousands) { count_thousands++; continue; } // Looking for unique characters that will help identify column types. switch (*cur) { case '-': count_dash++; break; case '+': count_plus++; break; case '/': count_slash++; break; case ':': count_colon++; break; case 'e': case 'E': if (cur > trimmed_field_range.first && cur < trimmed_field_range.second - 1) count_exponent++; break; default: count_string++; break; } } // Integers have to have the length of the string // Off by one if they start with a minus sign auto const int_req_number_cnt = trimmed_field_len - count_thousands - ((*trimmed_field_range.first == '-' || *trimmed_field_range.first == '+') && trimmed_field_len > 1); if (column_flags[col] & column_parse::as_datetime) { // PANDAS uses `object` dtype if the date is unparseable if (is_datetime(count_string, count_decimal, count_colon, count_dash, count_slash)) { atomicAdd(&d_column_data[actual_col].datetime_count, 1); } else { atomicAdd(&d_column_data[actual_col].string_count, 1); } } else if (count_number == int_req_number_cnt) { auto const is_negative = (*trimmed_field_range.first == '-'); auto const data_begin = trimmed_field_range.first + (is_negative || (*trimmed_field_range.first == '+')); cudf::size_type* ptr = cudf::io::gpu::infer_integral_field_counter( data_begin, data_begin + count_number, is_negative, d_column_data[actual_col]); atomicAdd(ptr, 1); } else if (is_floatingpoint(trimmed_field_len, count_number, count_decimal, count_thousands, count_dash + count_plus, count_exponent)) { atomicAdd(&d_column_data[actual_col].float_count, 1); } else { atomicAdd(&d_column_data[actual_col].string_count, 1); } } actual_col++; } next_field = next_delimiter + 1; field_start = next_field; col++; } } /** * @brief CUDA kernel that parses and converts CSV data into cuDF column data. * * Data is processed one record at a time * * @param[in] options A set of parsing options * @param[in] data The entire CSV data to read * @param[in] column_flags Per-column parsing behavior flags * @param[in] row_offsets The start the CSV data of interest * @param[in] dtypes The data type of the column * @param[out] columns The output column data * @param[out] valids The bitmaps indicating whether column fields are valid * @param[out] valid_counts The number of valid fields in each column */ __global__ void __launch_bounds__(csvparse_block_dim) convert_csv_to_cudf(cudf::io::parse_options_view options, device_span<char const> data, device_span<column_parse::flags const> column_flags, device_span<uint64_t const> row_offsets, device_span<cudf::data_type const> dtypes, device_span<void* const> columns, device_span<cudf::bitmask_type* const> valids, device_span<size_type> valid_counts) { auto const raw_csv = data.data(); // thread IDs range per block, so also need the block id. 
// this is entry into the field array - tid is an element within the num_entries array auto const rec_id = grid_1d::global_thread_id(); auto const rec_id_next = rec_id + 1; // we can have more threads than data, make sure we are not past the end of the data if (rec_id_next >= row_offsets.size()) return; auto field_start = raw_csv + row_offsets[rec_id]; auto const row_end = raw_csv + row_offsets[rec_id_next]; auto next_field = field_start; int col = 0; int actual_col = 0; while (col < column_flags.size() && field_start < row_end) { auto next_delimiter = cudf::io::gpu::seek_field_end(next_field, row_end, options); if (column_flags[col] & column_parse::enabled) { // check if the entire field is a NaN string - consistent with pandas auto const is_valid = !serialized_trie_contains( options.trie_na, {field_start, static_cast<size_t>(next_delimiter - field_start)}); // Modify field_start & end to ignore whitespace and quotechars auto field_end = next_delimiter; if (is_valid && dtypes[actual_col].id() != cudf::type_id::STRING) { auto const trimmed_field = trim_whitespaces_quotes(field_start, field_end, options.quotechar); field_start = trimmed_field.first; field_end = trimmed_field.second; } if (is_valid) { // Type dispatcher does not handle STRING if (dtypes[actual_col].id() == cudf::type_id::STRING) { auto end = next_delimiter; if (not options.keepquotes) { if ((*field_start == options.quotechar) && (*(end - 1) == options.quotechar)) { ++field_start; --end; } } auto str_list = static_cast<std::pair<char const*, size_t>*>(columns[actual_col]); str_list[rec_id].first = field_start; str_list[rec_id].second = end - field_start; } else { if (cudf::type_dispatcher(dtypes[actual_col], ConvertFunctor{}, field_start, field_end, columns[actual_col], rec_id, dtypes[actual_col], options, column_flags[col] & column_parse::as_hexadecimal)) { // set the valid bitmap - all bits were set to 0 to start set_bit(valids[actual_col], rec_id); atomicAdd(&valid_counts[actual_col], 1); } } } else if (dtypes[actual_col].id() == cudf::type_id::STRING) { auto str_list = static_cast<std::pair<char const*, size_t>*>(columns[actual_col]); str_list[rec_id].first = nullptr; str_list[rec_id].second = 0; } ++actual_col; } next_field = next_delimiter + 1; field_start = next_field; ++col; } } /* * @brief Merge two packed row contexts (each corresponding to a block of characters) * and return the packed row context corresponding to the merged character block */ inline __device__ packed_rowctx_t merge_row_contexts(packed_rowctx_t first_ctx, packed_rowctx_t second_ctx) { uint32_t id0 = get_row_context(first_ctx, ROW_CTX_NONE) & 3; uint32_t id1 = get_row_context(first_ctx, ROW_CTX_QUOTE) & 3; uint32_t id2 = get_row_context(first_ctx, ROW_CTX_COMMENT) & 3; return (first_ctx & ~pack_row_contexts(3, 3, 3)) + pack_row_contexts(get_row_context(second_ctx, id0), get_row_context(second_ctx, id1), get_row_context(second_ctx, id2)); } /* * @brief Per-character context: * 1-bit count (0 or 1) per context in the lower 4 bits * 2-bit output context id per input context in bits 8..15 */ constexpr __device__ uint32_t make_char_context(uint32_t id0, uint32_t id1, uint32_t id2 = ROW_CTX_COMMENT, uint32_t c0 = 0, uint32_t c1 = 0, uint32_t c2 = 0) { return (id0 << 8) | (id1 << 10) | (id2 << 12) | (ROW_CTX_EOF << 14) | (c0) | (c1 << 1) | (c2 << 2); } /* * @brief Merge a 1-character context to keep track of bitmasks where new rows occur * Merges a single-character "block" row context at position pos with the current 
block contains 32-pos characters) * * @param ctx Current block context and new rows bitmaps * @param char_ctx state transitions associated with new character * @param pos Position within the current 32-character block * * NOTE: This is probably the most performance-critical piece of the row gathering kernel. * The char_ctx value should be created via make_char_context, and its value should * have been evaluated at compile-time. */ inline __device__ void merge_char_context(uint4& ctx, uint32_t char_ctx, uint32_t pos) { uint32_t id0 = (ctx.w >> 0) & 3; uint32_t id1 = (ctx.w >> 2) & 3; uint32_t id2 = (ctx.w >> 4) & 3; // Set the newrow bit in the bitmap at the corresponding position ctx.x |= ((char_ctx >> id0) & 1) << pos; ctx.y |= ((char_ctx >> id1) & 1) << pos; ctx.z |= ((char_ctx >> id2) & 1) << pos; // Update the output context ids ctx.w = ((char_ctx >> (8 + id0 * 2)) & 0x03) | ((char_ctx >> (6 + id1 * 2)) & 0x0c) | ((char_ctx >> (4 + id2 * 2)) & 0x30) | (ROW_CTX_EOF << 6); } /* * Convert the context-with-row-bitmaps version to a packed row context */ inline __device__ packed_rowctx_t pack_rowmaps(uint4 ctx_map) { return pack_row_contexts(make_row_context(__popc(ctx_map.x), (ctx_map.w >> 0) & 3), make_row_context(__popc(ctx_map.y), (ctx_map.w >> 2) & 3), make_row_context(__popc(ctx_map.z), (ctx_map.w >> 4) & 3)); } /* * Selects the row bitmap corresponding to the given parser state */ inline __device__ uint32_t select_rowmap(uint4 ctx_map, uint32_t ctxid) { return (ctxid == ROW_CTX_NONE) ? ctx_map.x : (ctxid == ROW_CTX_QUOTE) ? ctx_map.y : (ctxid == ROW_CTX_COMMENT) ? ctx_map.z : 0; } /** * @brief Single pair-wise 512-wide row context merge transform * * Merge row context blocks and record the merge operation in a context * tree so that the transform is reversible. 
* The tree is organized such that the left and right children of node n * are located at indices n*2 and n*2+1, the root node starting at index 1 * * @tparam lanemask mask to specify source of packed row context * @tparam tmask mask to specify principal thread for merging row context * @tparam base start location for writing into packed row context tree * @tparam level_scale level of the node in the tree * @param[out] ctxtree packed row context tree * @param[in] ctxb packed row context for the current character block * @param t thread id (leaf node id) */ template <uint32_t lanemask, uint32_t tmask, uint32_t base, uint32_t level_scale> inline __device__ void ctx_merge(uint64_t* ctxtree, packed_rowctx_t* ctxb, uint32_t t) { uint64_t tmp = shuffle_xor(*ctxb, lanemask); if (!(t & tmask)) { *ctxb = merge_row_contexts(*ctxb, tmp); ctxtree[base + (t >> level_scale)] = *ctxb; } } /** * @brief Single 512-wide row context inverse merge transform * * Walks the context tree starting from a root node * * @tparam rmask Mask to specify which threads write input row context * @param[in] base Start read location of the merge transform tree * @param[in] ctxtree Merge transform tree * @param[in] ctx Input context * @param[in] brow4 output row in block *4 * @param[in] t thread id (leaf node id) */ template <uint32_t rmask> inline __device__ void ctx_unmerge( uint32_t base, uint64_t* ctxtree, uint32_t* ctx, uint32_t* brow4, uint32_t t) { rowctx32_t ctxb_left, ctxb_right, ctxb_sum; ctxb_sum = get_row_context(ctxtree[base], *ctx); ctxb_left = get_row_context(ctxtree[(base)*2 + 0], *ctx); ctxb_right = get_row_context(ctxtree[(base)*2 + 1], ctxb_left & 3); if (t & (rmask)) { *brow4 += (ctxb_sum & ~3) - (ctxb_right & ~3); *ctx = ctxb_left & 3; } } /* * @brief 512-wide row context merge transform * * Repeatedly merge row context blocks, keeping track of each merge operation * in a context tree so that the transform is reversible * The tree is organized such that the left and right children of node n * are located at indices n*2 and n*2+1, the root node starting at index 1 * * Each node contains the counts and output contexts corresponding to the * possible input contexts. * Each parent node's count is obtained by adding the corresponding counts * from the left child node with the right child node's count selected from * the left child node's output context: * parent.count[k] = left.count[k] + right.count[left.outctx[k]] * parent.outctx[k] = right.outctx[left.outctx[k]] * * @param[out] ctxtree packed row context tree * @param[in] ctxb packed row context for the current character block * @param t thread id (leaf node id) */ static inline __device__ void rowctx_merge_transform(uint64_t ctxtree[1024], packed_rowctx_t ctxb, uint32_t t) { ctxtree[512 + t] = ctxb; ctx_merge<1, 0x1, 256, 1>(ctxtree, &ctxb, t); ctx_merge<2, 0x3, 128, 2>(ctxtree, &ctxb, t); ctx_merge<4, 0x7, 64, 3>(ctxtree, &ctxb, t); ctx_merge<8, 0xf, 32, 4>(ctxtree, &ctxb, t); __syncthreads(); if (t < 32) { ctxb = ctxtree[32 + t]; ctx_merge<1, 0x1, 16, 1>(ctxtree, &ctxb, t); ctx_merge<2, 0x3, 8, 2>(ctxtree, &ctxb, t); ctx_merge<4, 0x7, 4, 3>(ctxtree, &ctxb, t); ctx_merge<8, 0xf, 2, 4>(ctxtree, &ctxb, t); // Final stage uint64_t tmp = shuffle_xor(ctxb, 16); if (t == 0) { ctxtree[1] = merge_row_contexts(ctxb, tmp); } } } /* * @brief 512-wide row context inverse merge transform * * Walks the context tree starting from the root node (index 1) using * the starting context in node index 0. 
* The return value is the starting row and input context for the given leaf node * * @param[in] ctxtree Merge transform tree * @param[in] t thread id (leaf node id) * * @return Final row context and count (row_position*4 + context_id format) */ static inline __device__ rowctx32_t rowctx_inverse_merge_transform(uint64_t ctxtree[1024], uint32_t t) { uint32_t ctx = ctxtree[0] & 3; // Starting input context rowctx32_t brow4 = 0; // output row in block *4 ctx_unmerge<256>(1, ctxtree, &ctx, &brow4, t); ctx_unmerge<128>(2 + (t >> 8), ctxtree, &ctx, &brow4, t); ctx_unmerge<64>(4 + (t >> 7), ctxtree, &ctx, &brow4, t); ctx_unmerge<32>(8 + (t >> 6), ctxtree, &ctx, &brow4, t); ctx_unmerge<16>(16 + (t >> 5), ctxtree, &ctx, &brow4, t); ctx_unmerge<8>(32 + (t >> 4), ctxtree, &ctx, &brow4, t); ctx_unmerge<4>(64 + (t >> 3), ctxtree, &ctx, &brow4, t); ctx_unmerge<2>(128 + (t >> 2), ctxtree, &ctx, &brow4, t); ctx_unmerge<1>(256 + (t >> 1), ctxtree, &ctx, &brow4, t); return brow4 + ctx; } /** * @brief Gather row offsets from CSV character data split into 16KB chunks * * This is done in two phases: the first phase returns the possible row counts * per 16K character block for each possible parsing context at the start of the block, * along with the resulting parsing context at the end of the block. * The caller can then compute the actual parsing context at the beginning of each * individual block and total row count. * The second phase outputs the location of each row in the block, using the parsing * context and initial row counter accumulated from the results of the previous phase. * Row parsing context will be updated after phase 2 such that the value contains * the number of rows starting at byte_range_end or beyond. * * @param row_ctx Row parsing context (output of phase 1 or input to phase 2) * @param offsets_out Row offsets (nullptr for phase1, non-null indicates phase 2) * @param data Base pointer of character data (all row offsets are relative to this) * @param chunk_size Total number of characters to parse * @param parse_pos Current parsing position in the file * @param start_offset Position of the start of the character buffer in the file * @param data_size CSV file size * @param byte_range_start Ignore rows starting before this position in the file * @param byte_range_end In phase 2, store the number of rows beyond range in row_ctx * @param skip_rows Number of rows to skip (ignored in phase 1) * @param terminator Line terminator character * @param delimiter Column delimiter character * @param quotechar Quote character * @param escapechar Delimiter escape character * @param commentchar Comment line character (skip rows starting with this character) */ __global__ void __launch_bounds__(rowofs_block_dim) gather_row_offsets_gpu(uint64_t* row_ctx, device_span<uint64_t> offsets_out, device_span<char const> const data, size_t chunk_size, size_t parse_pos, size_t start_offset, size_t data_size, size_t byte_range_start, size_t byte_range_end, size_t skip_rows, int terminator, int delimiter, int quotechar, int escapechar, int commentchar) { auto start = data.begin(); using block_reduce = typename cub::BlockReduce<uint32_t, rowofs_block_dim>; __shared__ union { typename block_reduce::TempStorage bk_storage; __align__(8) uint64_t ctxtree[rowofs_block_dim * 2]; } temp_storage; char const* end = start + (min(parse_pos + chunk_size, data_size) - start_offset); uint32_t t = threadIdx.x; size_t block_pos = (parse_pos - start_offset) + blockIdx.x * static_cast<size_t>(rowofs_block_bytes) + t * 32; char const* cur = 
start + block_pos; // Initial state is neutral context (no state transitions), zero rows uint4 ctx_map = { .x = 0, .y = 0, .z = 0, .w = (ROW_CTX_NONE << 0) | (ROW_CTX_QUOTE << 2) | (ROW_CTX_COMMENT << 4) | (ROW_CTX_EOF << 6)}; int c, c_prev = (cur > start && cur <= end) ? cur[-1] : terminator; // Loop through all 32 bytes and keep a bitmask of row starts for each possible input context for (uint32_t pos = 0; pos < 32; pos++, cur++, c_prev = c) { uint32_t ctx; if (cur < end) { c = cur[0]; if (c_prev == terminator) { if (c == commentchar) { // Start of a new comment row ctx = make_char_context(ROW_CTX_COMMENT, ROW_CTX_QUOTE, ROW_CTX_COMMENT, 1, 0, 1); } else if (c == quotechar) { // Quoted string on newrow, or quoted string ending in terminator ctx = make_char_context(ROW_CTX_QUOTE, ROW_CTX_NONE, ROW_CTX_QUOTE, 1, 0, 1); } else { // Start of a new row unless within a quote ctx = make_char_context(ROW_CTX_NONE, ROW_CTX_QUOTE, ROW_CTX_NONE, 1, 0, 1); } } else if (c == quotechar) { if (c_prev == delimiter || c_prev == quotechar) { // Quoted string after delimiter, quoted string ending in delimiter, or double-quote ctx = make_char_context(ROW_CTX_QUOTE, ROW_CTX_NONE); } else { // Closing or ignored quote ctx = make_char_context(ROW_CTX_NONE, ROW_CTX_NONE); } } else { // Neutral character ctx = make_char_context(ROW_CTX_NONE, ROW_CTX_QUOTE); } } else { char const* data_end = start + data_size - start_offset; if (cur <= end && cur == data_end) { // Add a newline at data end (need the extra row offset to infer length of previous row) ctx = make_char_context(ROW_CTX_EOF, ROW_CTX_EOF, ROW_CTX_EOF, 1, 1, 1); } else { // Pass-through context (beyond chunk_size or data_end) ctx = make_char_context(ROW_CTX_NONE, ROW_CTX_QUOTE, ROW_CTX_COMMENT); } } // Merge with current context, keeping track of where new rows occur merge_char_context(ctx_map, ctx, pos); } // Eliminate rows that start before byte_range_start if (start_offset + block_pos < byte_range_start) { uint32_t dist_minus1 = min(byte_range_start - (start_offset + block_pos) - 1, UINT64_C(31)); uint32_t mask = 0xffff'fffe << dist_minus1; ctx_map.x &= mask; ctx_map.y &= mask; ctx_map.z &= mask; } // Convert the long-form {rowmap,outctx}[inctx] version into packed version // {rowcount,ouctx}[inctx], then merge the row contexts of the 32-character blocks into // a single 16K-character block context rowctx_merge_transform(temp_storage.ctxtree, pack_rowmaps(ctx_map), t); // If this is the second phase, get the block's initial parser state and row counter if (offsets_out.data()) { if (t == 0) { temp_storage.ctxtree[0] = row_ctx[blockIdx.x]; } __syncthreads(); // Walk back the transform tree with the known initial parser state rowctx32_t ctx = rowctx_inverse_merge_transform(temp_storage.ctxtree, t); uint64_t row = (temp_storage.ctxtree[0] >> 2) + (ctx >> 2); uint32_t rows_out_of_range = 0; uint32_t rowmap = select_rowmap(ctx_map, ctx & 3); // Output row positions while (rowmap != 0) { uint32_t pos = __ffs(rowmap); block_pos += pos; if (row >= skip_rows && row - skip_rows < offsets_out.size()) { // Output byte offsets are relative to the base of the input buffer offsets_out[row - skip_rows] = block_pos - 1; rows_out_of_range += (start_offset + block_pos - 1 >= byte_range_end); } row++; rowmap >>= pos; } __syncthreads(); // Return the number of rows out of range rows_out_of_range = block_reduce(temp_storage.bk_storage).Sum(rows_out_of_range); if (t == 0) { row_ctx[blockIdx.x] = rows_out_of_range; } } else { // Just store the row counts and output contexts if 
(t == 0) { row_ctx[blockIdx.x] = temp_storage.ctxtree[1]; } } } size_t __host__ count_blank_rows(cudf::io::parse_options_view const& opts, device_span<char const> data, device_span<uint64_t const> row_offsets, rmm::cuda_stream_view stream) { auto const newline = opts.skipblanklines ? opts.terminator : opts.comment; auto const comment = opts.comment != '\0' ? opts.comment : newline; auto const carriage = (opts.skipblanklines && opts.terminator == '\n') ? '\r' : comment; return thrust::count_if( rmm::exec_policy(stream), row_offsets.begin(), row_offsets.end(), [data = data, newline, comment, carriage] __device__(uint64_t const pos) { return ((pos != data.size()) && (data[pos] == newline || data[pos] == comment || data[pos] == carriage)); }); } device_span<uint64_t> __host__ remove_blank_rows(cudf::io::parse_options_view const& options, device_span<char const> data, device_span<uint64_t> row_offsets, rmm::cuda_stream_view stream) { size_t d_size = data.size(); auto const newline = options.skipblanklines ? options.terminator : options.comment; auto const comment = options.comment != '\0' ? options.comment : newline; auto const carriage = (options.skipblanklines && options.terminator == '\n') ? '\r' : comment; auto new_end = thrust::remove_if( rmm::exec_policy(stream), row_offsets.begin(), row_offsets.end(), [data = data, d_size, newline, comment, carriage] __device__(uint64_t const pos) { return ((pos != d_size) && (data[pos] == newline || data[pos] == comment || data[pos] == carriage)); }); return row_offsets.subspan(0, new_end - row_offsets.begin()); } std::vector<column_type_histogram> detect_column_types( cudf::io::parse_options_view const& options, device_span<char const> const data, device_span<column_parse::flags const> const column_flags, device_span<uint64_t const> const row_starts, size_t const num_active_columns, rmm::cuda_stream_view stream) { // Calculate actual block count to use based on records count int const block_size = csvparse_block_dim; int const grid_size = (row_starts.size() + block_size - 1) / block_size; auto d_stats = detail::make_zeroed_device_uvector_async<column_type_histogram>( num_active_columns, stream, rmm::mr::get_current_device_resource()); data_type_detection<<<grid_size, block_size, 0, stream.value()>>>( options, data, column_flags, row_starts, d_stats); return detail::make_std_vector_sync(d_stats, stream); } void decode_row_column_data(cudf::io::parse_options_view const& options, device_span<char const> data, device_span<column_parse::flags const> column_flags, device_span<uint64_t const> row_offsets, device_span<cudf::data_type const> dtypes, device_span<void* const> columns, device_span<cudf::bitmask_type* const> valids, device_span<size_type> valid_counts, rmm::cuda_stream_view stream) { // Calculate actual block count to use based on records count auto const block_size = csvparse_block_dim; auto const num_rows = row_offsets.size() - 1; auto const grid_size = (num_rows + block_size - 1) / block_size; convert_csv_to_cudf<<<grid_size, block_size, 0, stream.value()>>>( options, data, column_flags, row_offsets, dtypes, columns, valids, valid_counts); } uint32_t __host__ gather_row_offsets(parse_options_view const& options, uint64_t* row_ctx, device_span<uint64_t> const offsets_out, device_span<char const> const data, size_t chunk_size, size_t parse_pos, size_t start_offset, size_t data_size, size_t byte_range_start, size_t byte_range_end, size_t skip_rows, rmm::cuda_stream_view stream) { uint32_t dim_grid = 1 + (chunk_size / rowofs_block_bytes); 
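// Launch at least one block, plus one per full 16KB (rowofs_block_bytes) of input.
// Disabled quote/comment characters are passed to the kernel as 0x100, an
// out-of-range value that can never compare equal to any input byte; escape
// sequences are not currently supported, hence the commented-out escapechar
// argument below.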
gather_row_offsets_gpu<<<dim_grid, rowofs_block_dim, 0, stream.value()>>>( row_ctx, offsets_out, data, chunk_size, parse_pos, start_offset, data_size, byte_range_start, byte_range_end, skip_rows, options.terminator, options.delimiter, (options.quotechar) ? options.quotechar : 0x100, /*(options.escapechar) ? options.escapechar :*/ 0x100, (options.comment) ? options.comment : 0x100); return dim_grid; } } // namespace gpu } // namespace csv } // namespace io } // namespace cudf
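// ---------------------------------------------------------------------------
// Illustrative two-phase call pattern for gather_row_offsets (a sketch only;
// the actual driver loop lives in reader_impl.cu):
//
//   // Phase 1: pass an empty output span -> each block writes packed
//   //          {row_count, end_context} values into row_ctx.
//   auto num_blocks = gather_row_offsets(opts, row_ctx, device_span<uint64_t>{}, data, ...);
//   // Host-side scan: resolve the true starting context of every block with
//   // select_row_context(), storing it back into row_ctx for phase 2.
//   // Phase 2: pass the real output span -> blocks emit row offsets, and
//   //          row_ctx returns the number of rows at/after byte_range_end.
//   gather_row_offsets(opts, row_ctx, offsets_out, data, ...);
// ---------------------------------------------------------------------------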
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/reader_impl.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file reader_impl.cu * @brief cuDF-IO CSV reader class implementation */ #include "csv_common.hpp" #include "csv_gpu.hpp" #include <io/comp/io_uncomp.hpp> #include <io/utilities/column_buffer.hpp> #include <io/utilities/hostdevice_vector.hpp> #include <io/utilities/parsing_utils.cuh> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/detail/utilities/visitor_overload.hpp> #include <cudf/io/csv.hpp> #include <cudf/io/datasource.hpp> #include <cudf/io/detail/csv.hpp> #include <cudf/io/types.hpp> #include <cudf/strings/detail/replace.hpp> #include <cudf/table/table.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <thrust/host_vector.h> #include <thrust/iterator/counting_iterator.h> #include <algorithm> #include <iostream> #include <memory> #include <numeric> #include <string> #include <tuple> #include <unordered_map> #include <unordered_set> #include <utility> #include <vector> using std::string; using std::vector; using cudf::device_span; using cudf::host_span; using cudf::detail::make_device_uvector_async; namespace cudf { namespace io { namespace detail { namespace csv { using namespace cudf::io::csv; using namespace cudf::io; namespace { /** * @brief Offsets of CSV rows in device memory, accessed through a shrinkable span. * * Row offsets are stored this way to avoid reallocation/copies when discarding front or back * elements. */ class selected_rows_offsets { rmm::device_uvector<uint64_t> all; device_span<uint64_t const> selected; public: selected_rows_offsets(rmm::device_uvector<uint64_t>&& data, device_span<uint64_t const> selected_span) : all{std::move(data)}, selected{selected_span} { } selected_rows_offsets(rmm::cuda_stream_view stream) : all{0, stream}, selected{all} {} operator device_span<uint64_t const>() const { return selected; } void shrink(size_t size) { CUDF_EXPECTS(size <= selected.size(), "New size must be smaller"); selected = selected.subspan(0, size); } void erase_first_n(size_t n) { CUDF_EXPECTS(n <= selected.size(), "Too many elements to remove"); selected = selected.subspan(n, selected.size() - n); } auto size() const { return selected.size(); } auto data() const { return selected.data(); } }; /** * @brief Removes the first and Last quote in the string */ string removeQuotes(string str, char quotechar) { // Exclude first and last quotation char size_t const first_quote = str.find(quotechar); if (first_quote != string::npos) { str.erase(first_quote, 1); } size_t const last_quote = str.rfind(quotechar); if (last_quote != string::npos) { str.erase(last_quote, 1); } return str; } /** * @brief Parse the first row to set the column names in the raw_csv parameter. 
* The first row can be either the header row, or the first data row */ std::vector<std::string> get_column_names(std::vector<char> const& header, parse_options_view const& parse_opts, int header_row, std::string prefix) { std::vector<std::string> col_names; // If there is only a single character then it would be the terminator if (header.size() <= 1) { return col_names; } std::vector<char> first_row = header; bool quotation = false; for (size_t pos = 0, prev = 0; pos < first_row.size(); ++pos) { // Flip the quotation flag if current character is a quotechar if (first_row[pos] == parse_opts.quotechar) { quotation = !quotation; } // Check if end of a column/row else if (pos == first_row.size() - 1 || (!quotation && first_row[pos] == parse_opts.terminator) || (!quotation && first_row[pos] == parse_opts.delimiter)) { // This is the header, add the column name if (header_row >= 0) { // Include the current character, in case the line is not terminated int col_name_len = pos - prev + 1; // Exclude the delimiter/terminator is present if (first_row[pos] == parse_opts.delimiter || first_row[pos] == parse_opts.terminator) { --col_name_len; } // Also exclude '\r' character at the end of the column name if it's // part of the terminator if (col_name_len > 0 && parse_opts.terminator == '\n' && first_row[pos] == '\n' && first_row[pos - 1] == '\r') { --col_name_len; } string const new_col_name(first_row.data() + prev, col_name_len); col_names.push_back(removeQuotes(new_col_name, parse_opts.quotechar)); } else { // This is the first data row, add the automatically generated name col_names.push_back(prefix + std::to_string(col_names.size())); } // Stop parsing when we hit the line terminator; relevant when there is // a blank line following the header. In this case, first_row includes // multiple line terminators at the end, as the new recStart belongs to // a line that comes after the blank line(s) if (!quotation && first_row[pos] == parse_opts.terminator) { break; } // Skip adjacent delimiters if delim_whitespace is set while (parse_opts.multi_delimiter && pos < first_row.size() && first_row[pos] == parse_opts.delimiter && first_row[pos + 1] == parse_opts.delimiter) { ++pos; } prev = pos + 1; } } return col_names; } template <typename C> void erase_except_last(C& container, rmm::cuda_stream_view stream) { cudf::detail::device_single_thread( [span = device_span<typename C::value_type>{container}] __device__() mutable { span.front() = span.back(); }, stream); container.resize(1, stream); } size_t find_first_row_start(char row_terminator, host_span<char const> data) { // For now, look for the first terminator (assume the first terminator isn't within a quote) // TODO: Attempt to infer this from the data size_t pos = 0; while (pos < data.size() && data[pos] != row_terminator) { ++pos; } return std::min(pos + 1, data.size()); } /** * @brief Finds row positions in the specified input data, and loads the selected data onto GPU. * * This function scans the input data to record the row offsets (relative to the start of the * input data). A row is actually the data/offset between two termination symbols. 
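* Internally the input is processed in chunks of at most 64MB; each chunk runs the two-pass GPU kernel, first counting rows per 16KB block and then emitting the actual row offsets once the per-block parser contexts are known.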
* * @param data Uncompressed input data in host memory * @param range_begin Only include rows starting after this position * @param range_end Only include rows starting before this position * @param skip_rows Number of rows to skip from the start * @param num_rows Number of rows to read; -1: all remaining data * @param load_whole_file Hint that the entire data will be needed on gpu * @param stream CUDA stream used for device memory operations and kernel launches * @return Input data and row offsets in the device memory */ std::pair<rmm::device_uvector<char>, selected_rows_offsets> load_data_and_gather_row_offsets( csv_reader_options const& reader_opts, parse_options const& parse_opts, std::vector<char>& header, host_span<char const> data, size_t range_begin, size_t range_end, size_t skip_rows, int64_t num_rows, bool load_whole_file, rmm::cuda_stream_view stream) { constexpr size_t max_chunk_bytes = 64 * 1024 * 1024; // 64MB size_t buffer_size = std::min(max_chunk_bytes, data.size()); size_t max_blocks = std::max<size_t>((buffer_size / cudf::io::csv::gpu::rowofs_block_bytes) + 1, 2); cudf::detail::hostdevice_vector<uint64_t> row_ctx(max_blocks, stream); size_t buffer_pos = std::min(range_begin - std::min(range_begin, sizeof(char)), data.size()); size_t pos = std::min(range_begin, data.size()); size_t header_rows = (reader_opts.get_header() >= 0) ? reader_opts.get_header() + 1 : 0; uint64_t ctx = 0; // For compatibility with the previous parser, a row is considered in-range if the // previous row terminator is within the given range range_end += (range_end < data.size()); // Reserve memory by allocating and then resetting the size rmm::device_uvector<char> d_data{ (load_whole_file) ? data.size() : std::min(buffer_size * 2, data.size()), stream}; d_data.resize(0, stream); rmm::device_uvector<uint64_t> all_row_offsets{0, stream}; do { size_t target_pos = std::min(pos + max_chunk_bytes, data.size()); size_t chunk_size = target_pos - pos; auto const previous_data_size = d_data.size(); d_data.resize(target_pos - buffer_pos, stream); CUDF_CUDA_TRY(cudaMemcpyAsync(d_data.begin() + previous_data_size, data.begin() + buffer_pos + previous_data_size, target_pos - buffer_pos - previous_data_size, cudaMemcpyDefault, stream.value())); // Pass 1: Count the potential number of rows in each character block for each // possible parser state at the beginning of the block. uint32_t num_blocks = cudf::io::csv::gpu::gather_row_offsets(parse_opts.view(), row_ctx.device_ptr(), device_span<uint64_t>(), d_data, chunk_size, pos, buffer_pos, data.size(), range_begin, range_end, skip_rows, stream); CUDF_CUDA_TRY(cudaMemcpyAsync(row_ctx.host_ptr(), row_ctx.device_ptr(), num_blocks * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); stream.synchronize(); // Sum up the rows in each character block, selecting the row count that // corresponds to the current input context. Also stores the now known input // context per character block that will be needed by the second pass. 
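// Each phase-1 result packs, for every possible starting parser context, the
// block's row count and resulting end context; select_row_context() picks the
// entry matching the running context `ctx` and accumulates its row count
// (ctx encodes row_count * 4 + context_id).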
for (uint32_t i = 0; i < num_blocks; i++) { uint64_t ctx_next = cudf::io::csv::gpu::select_row_context(ctx, row_ctx[i]); row_ctx[i] = ctx; ctx = ctx_next; } size_t total_rows = ctx >> 2; if (total_rows > skip_rows) { // At least one row in range in this batch all_row_offsets.resize(total_rows - skip_rows, stream); CUDF_CUDA_TRY(cudaMemcpyAsync(row_ctx.device_ptr(), row_ctx.host_ptr(), num_blocks * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); // Pass 2: Output row offsets cudf::io::csv::gpu::gather_row_offsets(parse_opts.view(), row_ctx.device_ptr(), all_row_offsets, d_data, chunk_size, pos, buffer_pos, data.size(), range_begin, range_end, skip_rows, stream); // With byte range, we want to keep only one row out of the specified range if (range_end < data.size()) { CUDF_CUDA_TRY(cudaMemcpyAsync(row_ctx.host_ptr(), row_ctx.device_ptr(), num_blocks * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); stream.synchronize(); size_t rows_out_of_range = 0; for (uint32_t i = 0; i < num_blocks; i++) { rows_out_of_range += row_ctx[i]; } if (rows_out_of_range != 0) { // Keep one row out of range (used to infer length of previous row) auto new_row_offsets_size = all_row_offsets.size() - std::min(rows_out_of_range - 1, all_row_offsets.size()); all_row_offsets.resize(new_row_offsets_size, stream); // Implies we reached the end of the range break; } } // num_rows does not include blank rows if (num_rows >= 0) { if (all_row_offsets.size() > header_rows + static_cast<size_t>(num_rows)) { size_t num_blanks = cudf::io::csv::gpu::count_blank_rows( parse_opts.view(), d_data, all_row_offsets, stream); if (all_row_offsets.size() - num_blanks > header_rows + static_cast<size_t>(num_rows)) { // Got the desired number of rows break; } } } } else { // Discard data (all rows below skip_rows), keeping one character for history size_t discard_bytes = std::max(d_data.size(), sizeof(char)) - sizeof(char); if (discard_bytes != 0) { erase_except_last(d_data, stream); buffer_pos += discard_bytes; } } pos = target_pos; } while (pos < data.size()); auto const non_blank_row_offsets = io::csv::gpu::remove_blank_rows(parse_opts.view(), d_data, all_row_offsets, stream); auto row_offsets = selected_rows_offsets{std::move(all_row_offsets), non_blank_row_offsets}; // Remove header rows and extract header size_t const header_row_index = std::max<size_t>(header_rows, 1) - 1; if (header_row_index + 1 < row_offsets.size()) { CUDF_CUDA_TRY(cudaMemcpyAsync(row_ctx.host_ptr(), row_offsets.data() + header_row_index, 2 * sizeof(uint64_t), cudaMemcpyDefault, stream.value())); stream.synchronize(); auto const header_start = buffer_pos + row_ctx[0]; auto const header_end = buffer_pos + row_ctx[1]; CUDF_EXPECTS(header_start <= header_end && header_end <= data.size(), "Invalid csv header location"); header.assign(data.begin() + header_start, data.begin() + header_end); if (header_rows > 0) { row_offsets.erase_first_n(header_rows); } } // Apply num_rows limit if (num_rows >= 0 && static_cast<size_t>(num_rows) < row_offsets.size() - 1) { row_offsets.shrink(num_rows + 1); } return {std::move(d_data), std::move(row_offsets)}; } std::pair<rmm::device_uvector<char>, selected_rows_offsets> select_data_and_row_offsets( cudf::io::datasource* source, csv_reader_options const& reader_opts, std::vector<char>& header, parse_options const& parse_opts, rmm::cuda_stream_view stream) { auto range_offset = reader_opts.get_byte_range_offset(); auto range_size = reader_opts.get_byte_range_size(); auto range_size_padded = 
reader_opts.get_byte_range_size_with_padding(); auto skip_rows = reader_opts.get_skiprows(); auto skip_end_rows = reader_opts.get_skipfooter(); auto num_rows = reader_opts.get_nrows(); if (range_offset > 0 || range_size > 0) { CUDF_EXPECTS(reader_opts.get_compression() == compression_type::NONE, "Reading compressed data using `byte range` is unsupported"); } // Transfer source data to GPU if (!source->is_empty()) { auto buffer = source->host_read(range_offset, range_size_padded != 0 ? range_size_padded : source->size()); auto h_data = host_span<char const>(reinterpret_cast<char const*>(buffer->data()), buffer->size()); std::vector<uint8_t> h_uncomp_data_owner; if (reader_opts.get_compression() != compression_type::NONE) { h_uncomp_data_owner = decompress(reader_opts.get_compression(), {buffer->data(), buffer->size()}); h_data = {reinterpret_cast<char const*>(h_uncomp_data_owner.data()), h_uncomp_data_owner.size()}; buffer.reset(); } // check for and skip UTF-8 BOM uint8_t const UTF8_BOM[] = {0xEF, 0xBB, 0xBF}; if (h_data.size() >= sizeof(UTF8_BOM) && memcmp(h_data.data(), UTF8_BOM, sizeof(UTF8_BOM)) == 0) { h_data = h_data.subspan(sizeof(UTF8_BOM), h_data.size() - sizeof(UTF8_BOM)); } // None of the parameters for row selection is used, we are parsing the entire file bool const load_whole_file = range_offset == 0 && range_size == 0 && skip_rows <= 0 && skip_end_rows <= 0 && num_rows == -1; // With byte range, find the start of the first data row size_t const data_start_offset = (range_offset != 0) ? find_first_row_start(parse_opts.terminator, h_data) : 0; // TODO: Allow parsing the header outside the mapped range CUDF_EXPECTS((range_offset == 0 || reader_opts.get_header() < 0), "byte_range offset with header not supported"); // Gather row offsets auto data_row_offsets = load_data_and_gather_row_offsets(reader_opts, parse_opts, header, h_data, data_start_offset, (range_size) ? range_size : h_data.size(), (skip_rows > 0) ? skip_rows : 0, num_rows, load_whole_file, stream); auto& row_offsets = data_row_offsets.second; // Exclude the rows that are to be skipped from the end if (skip_end_rows > 0 && static_cast<size_t>(skip_end_rows) < row_offsets.size()) { row_offsets.shrink(row_offsets.size() - skip_end_rows); } return data_row_offsets; } return {rmm::device_uvector<char>{0, stream}, selected_rows_offsets{stream}}; } void select_data_types(host_span<data_type const> user_dtypes, host_span<column_parse::flags> column_flags, host_span<data_type> column_types) { if (user_dtypes.empty()) { return; } CUDF_EXPECTS(user_dtypes.size() == 1 || user_dtypes.size() == column_flags.size(), "Specify data types for all columns in file, or use a dictionary/map"); for (auto col_idx = 0u; col_idx < column_flags.size(); ++col_idx) { if (column_flags[col_idx] & column_parse::enabled) { // If it's a single dtype, assign that dtype to all active columns auto const& dtype = user_dtypes.size() == 1 ? 
user_dtypes[0] : user_dtypes[col_idx]; column_types[col_idx] = dtype; // Reset the inferred flag, no need to infer the types from the data column_flags[col_idx] &= ~column_parse::inferred; } } } void get_data_types_from_column_names(std::map<std::string, data_type> const& user_dtypes, host_span<std::string const> column_names, host_span<column_parse::flags> column_flags, host_span<data_type> column_types) { if (user_dtypes.empty()) { return; } for (auto col_idx = 0u; col_idx < column_flags.size(); ++col_idx) { if (column_flags[col_idx] & column_parse::enabled) { auto const col_type_it = user_dtypes.find(column_names[col_idx]); if (col_type_it != user_dtypes.end()) { // Assign the type from the map column_types[col_idx] = col_type_it->second; // Reset the inferred flag, no need to infer the types from the data column_flags[col_idx] &= ~column_parse::inferred; } } } } void infer_column_types(parse_options const& parse_opts, host_span<column_parse::flags const> column_flags, device_span<char const> data, device_span<uint64_t const> row_offsets, int32_t num_records, data_type timestamp_type, host_span<data_type> column_types, rmm::cuda_stream_view stream) { if (num_records == 0) { for (auto col_idx = 0u; col_idx < column_flags.size(); ++col_idx) { if (column_flags[col_idx] & column_parse::inferred) { column_types[col_idx] = data_type(cudf::type_id::STRING); } } return; } auto const num_inferred_columns = std::count_if(column_flags.begin(), column_flags.end(), [](auto& flags) { return flags & column_parse::inferred; }); if (num_inferred_columns == 0) { return; } auto const column_stats = cudf::io::csv::gpu::detect_column_types( parse_opts.view(), data, make_device_uvector_async(column_flags, stream, rmm::mr::get_current_device_resource()), row_offsets, num_inferred_columns, stream); stream.synchronize(); auto inf_col_idx = 0; for (auto col_idx = 0u; col_idx < column_flags.size(); ++col_idx) { if (not(column_flags[col_idx] & column_parse::inferred)) { continue; } auto const& stats = column_stats[inf_col_idx++]; if (stats.null_count == num_records or stats.total_count() == 0) { // Entire column is NULL; allocate the smallest amount of memory column_types[col_idx] = data_type(cudf::type_id::INT8); } else if (stats.string_count > 0L) { column_types[col_idx] = data_type(cudf::type_id::STRING); } else if (stats.datetime_count > 0L) { column_types[col_idx] = timestamp_type.id() == cudf::type_id::EMPTY ? 
data_type(cudf::type_id::TIMESTAMP_NANOSECONDS) : timestamp_type; } else if (stats.bool_count > 0L) { column_types[col_idx] = data_type(cudf::type_id::BOOL8); } else if (stats.float_count > 0L) { column_types[col_idx] = data_type(cudf::type_id::FLOAT64); } else if (stats.big_int_count == 0) { column_types[col_idx] = data_type(cudf::type_id::INT64); } else if (stats.big_int_count != 0 && stats.negative_small_int_count != 0) { column_types[col_idx] = data_type(cudf::type_id::STRING); } else { // Integers are stored as 64-bit to conform to PANDAS column_types[col_idx] = data_type(cudf::type_id::UINT64); } } } std::vector<column_buffer> decode_data(parse_options const& parse_opts, std::vector<column_parse::flags> const& column_flags, std::vector<std::string> const& column_names, device_span<char const> data, device_span<uint64_t const> row_offsets, host_span<data_type const> column_types, int32_t num_records, int32_t num_actual_columns, int32_t num_active_columns, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // Alloc output; columns' data memory is still expected for empty dataframe std::vector<column_buffer> out_buffers; out_buffers.reserve(column_types.size()); for (int col = 0, active_col = 0; col < num_actual_columns; ++col) { if (column_flags[col] & column_parse::enabled) { auto out_buffer = column_buffer(column_types[active_col], num_records, true, stream, mr); out_buffer.name = column_names[col]; out_buffers.emplace_back(std::move(out_buffer)); active_col++; } } thrust::host_vector<void*> h_data(num_active_columns); thrust::host_vector<bitmask_type*> h_valid(num_active_columns); for (int i = 0; i < num_active_columns; ++i) { h_data[i] = out_buffers[i].data(); h_valid[i] = out_buffers[i].null_mask(); } auto d_valid_counts = cudf::detail::make_zeroed_device_uvector_async<size_type>( num_active_columns, stream, rmm::mr::get_current_device_resource()); cudf::io::csv::gpu::decode_row_column_data( parse_opts.view(), data, make_device_uvector_async(column_flags, stream, rmm::mr::get_current_device_resource()), row_offsets, make_device_uvector_async(column_types, stream, rmm::mr::get_current_device_resource()), make_device_uvector_async(h_data, stream, rmm::mr::get_current_device_resource()), make_device_uvector_async(h_valid, stream, rmm::mr::get_current_device_resource()), d_valid_counts, stream); auto const h_valid_counts = cudf::detail::make_std_vector_sync(d_valid_counts, stream); for (int i = 0; i < num_active_columns; ++i) { out_buffers[i].null_count() = num_records - h_valid_counts[i]; } return out_buffers; } std::vector<data_type> determine_column_types(csv_reader_options const& reader_opts, parse_options const& parse_opts, host_span<std::string const> column_names, device_span<char const> data, device_span<uint64_t const> row_offsets, int32_t num_records, host_span<column_parse::flags> column_flags, rmm::cuda_stream_view stream) { std::vector<data_type> column_types(column_flags.size()); std::visit(cudf::detail::visitor_overload{ [&](std::vector<data_type> const& user_dtypes) { return select_data_types(user_dtypes, column_flags, column_types); }, [&](std::map<std::string, data_type> const& user_dtypes) { return get_data_types_from_column_names( user_dtypes, column_names, column_flags, column_types); }}, reader_opts.get_dtypes()); infer_column_types(parse_opts, column_flags, data, row_offsets, num_records, reader_opts.get_timestamp_type(), column_types, stream); // compact column_types to only include active columns std::vector<data_type> active_col_types; 
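// Walk column_types in order and keep only entries whose column is still
// enabled; std::distance recovers each element's index from its address so the
// matching column_flags entry can be checked.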
std::copy_if(column_types.cbegin(), column_types.cend(), std::back_inserter(active_col_types), [&column_flags, &types = std::as_const(column_types)](auto& dtype) { auto const idx = std::distance(types.data(), &dtype); return column_flags[idx] & column_parse::enabled; }); return active_col_types; } table_with_metadata read_csv(cudf::io::datasource* source, csv_reader_options const& reader_opts, parse_options const& parse_opts, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { std::vector<char> header; auto const data_row_offsets = select_data_and_row_offsets(source, reader_opts, header, parse_opts, stream); auto const& data = data_row_offsets.first; auto const& row_offsets = data_row_offsets.second; auto const unique_use_cols_indexes = std::set(reader_opts.get_use_cols_indexes().cbegin(), reader_opts.get_use_cols_indexes().cend()); auto const detected_column_names = get_column_names(header, parse_opts.view(), reader_opts.get_header(), reader_opts.get_prefix()); auto const opts_have_all_col_names = not reader_opts.get_names().empty() and ( // no data to detect (the number of) columns detected_column_names.empty() or // number of user specified names matches what is detected reader_opts.get_names().size() == detected_column_names.size() or // Columns are not selected by indices; read first reader_opts.get_names().size() columns unique_use_cols_indexes.empty()); auto column_names = opts_have_all_col_names ? reader_opts.get_names() : detected_column_names; auto const num_actual_columns = static_cast<int32_t>(column_names.size()); auto num_active_columns = num_actual_columns; auto column_flags = std::vector<column_parse::flags>( num_actual_columns, column_parse::enabled | column_parse::inferred); // User did not pass column names to override names in the file // Process names from the file to remove empty and duplicated strings if (not opts_have_all_col_names) { std::vector<size_t> col_loop_order(column_names.size()); auto unnamed_it = std::copy_if( thrust::make_counting_iterator<size_t>(0), thrust::make_counting_iterator<size_t>(column_names.size()), col_loop_order.begin(), [&column_names](auto col_idx) -> bool { return not column_names[col_idx].empty(); }); // Rename empty column names to "Unnamed: col_index" std::copy_if(thrust::make_counting_iterator<size_t>(0), thrust::make_counting_iterator<size_t>(column_names.size()), unnamed_it, [&column_names](auto col_idx) -> bool { auto is_empty = column_names[col_idx].empty(); if (is_empty) column_names[col_idx] = string("Unnamed: ") + std::to_string(col_idx); return is_empty; }); // Looking for duplicates std::unordered_map<string, int> col_names_counts; if (!reader_opts.is_enabled_mangle_dupe_cols()) { for (auto& col_name : column_names) { if (++col_names_counts[col_name] > 1) { CUDF_LOG_WARN("Multiple columns with name {}; only the first appearance is parsed", col_name); auto const idx = &col_name - column_names.data(); column_flags[idx] = column_parse::disabled; } } } else { // For constant/linear search. std::unordered_multiset<std::string> header(column_names.begin(), column_names.end()); for (auto const col_idx : col_loop_order) { auto col = column_names[col_idx]; auto cur_count = col_names_counts[col]; if (cur_count > 0) { auto const old_col = col; // Rename duplicates of column X as X.1, X.2, ...; First appearance stays as X while (cur_count > 0) { col_names_counts[old_col] = cur_count + 1; col = old_col + "." 
+ std::to_string(cur_count); if (header.find(col) != header.end()) { cur_count++; } else { cur_count = col_names_counts[col]; } } if (auto pos = header.find(old_col); pos != header.end()) { header.erase(pos); } header.insert(col); column_names[col_idx] = col; } col_names_counts[col] = cur_count + 1; } } // Update the number of columns to be processed, if some might have been removed if (!reader_opts.is_enabled_mangle_dupe_cols()) { num_active_columns = col_names_counts.size(); } } // User can specify which columns should be parsed auto const unique_use_cols_names = std::unordered_set(reader_opts.get_use_cols_names().cbegin(), reader_opts.get_use_cols_names().cend()); auto const is_column_selection_used = not unique_use_cols_names.empty() or not unique_use_cols_indexes.empty(); // Reset flags and output column count; columns will be reactivated based on the selection options if (is_column_selection_used) { std::fill(column_flags.begin(), column_flags.end(), column_parse::disabled); num_active_columns = 0; } // Column selection via column indexes if (not unique_use_cols_indexes.empty()) { // Users can pass names for the selected columns only, if selecting column by their indices auto const are_opts_col_names_used = not reader_opts.get_names().empty() and not opts_have_all_col_names; CUDF_EXPECTS(not are_opts_col_names_used or reader_opts.get_names().size() == unique_use_cols_indexes.size(), "Specify names of all columns in the file, or names of all selected columns"); for (auto const index : unique_use_cols_indexes) { column_flags[index] = column_parse::enabled | column_parse::inferred; if (are_opts_col_names_used) { column_names[index] = reader_opts.get_names()[num_active_columns]; } ++num_active_columns; } } // Column selection via column names if (not unique_use_cols_names.empty()) { for (auto const& name : unique_use_cols_names) { auto const it = std::find(column_names.cbegin(), column_names.cend(), name); CUDF_EXPECTS(it != column_names.end(), "Nonexistent column selected"); auto const col_idx = std::distance(column_names.cbegin(), it); if (column_flags[col_idx] == column_parse::disabled) { column_flags[col_idx] = column_parse::enabled | column_parse::inferred; ++num_active_columns; } } } // User can specify which columns should be read as datetime if (!reader_opts.get_parse_dates_indexes().empty() || !reader_opts.get_parse_dates_names().empty()) { for (auto const index : reader_opts.get_parse_dates_indexes()) { column_flags[index] |= column_parse::as_datetime; } for (auto const& name : reader_opts.get_parse_dates_names()) { auto it = std::find(column_names.begin(), column_names.end(), name); if (it != column_names.end()) { column_flags[it - column_names.begin()] |= column_parse::as_datetime; } } } // User can specify which columns should be parsed as hexadecimal if (!reader_opts.get_parse_hex_indexes().empty() || !reader_opts.get_parse_hex_names().empty()) { for (auto const index : reader_opts.get_parse_hex_indexes()) { column_flags[index] |= column_parse::as_hexadecimal; } for (auto const& name : reader_opts.get_parse_hex_names()) { auto it = std::find(column_names.begin(), column_names.end(), name); if (it != column_names.end()) { column_flags[it - column_names.begin()] |= column_parse::as_hexadecimal; } } } // Return empty table rather than exception if nothing to load if (num_active_columns == 0) { return {std::make_unique<table>(), {}}; } // Exclude the end-of-data row from number of rows with actual data auto const num_records = std::max(row_offsets.size(), 1ul) - 1; auto const 
column_types = determine_column_types( reader_opts, parse_opts, column_names, data, row_offsets, num_records, column_flags, stream); auto metadata = table_metadata{}; auto out_columns = std::vector<std::unique_ptr<cudf::column>>(); out_columns.reserve(column_types.size()); if (num_records != 0) { auto out_buffers = decode_data( // parse_opts, column_flags, column_names, data, row_offsets, column_types, num_records, num_actual_columns, num_active_columns, stream, mr); for (size_t i = 0; i < column_types.size(); ++i) { metadata.schema_info.emplace_back(out_buffers[i].name); if (column_types[i].id() == type_id::STRING && parse_opts.quotechar != '\0' && parse_opts.doublequote) { // PANDAS' default behavior of enabling doublequote for two consecutive // quotechars in quoted fields results in reduction to a single quotechar // TODO: Would be much more efficient to perform this operation in-place // during the conversion stage std::string const quotechar(1, parse_opts.quotechar); std::string const dblquotechar(2, parse_opts.quotechar); std::unique_ptr<column> col = cudf::make_strings_column(*out_buffers[i]._strings, stream); out_columns.emplace_back( cudf::strings::detail::replace(col->view(), dblquotechar, quotechar, -1, stream, mr)); } else { out_columns.emplace_back(make_column(out_buffers[i], nullptr, std::nullopt, stream)); } } } else { // Create empty columns for (size_t i = 0; i < column_types.size(); ++i) { out_columns.emplace_back(make_empty_column(column_types[i])); } // Handle empty metadata for (int col = 0; col < num_actual_columns; ++col) { if (column_flags[col] & column_parse::enabled) { metadata.schema_info.emplace_back(column_names[col]); } } } return {std::make_unique<table>(std::move(out_columns)), std::move(metadata)}; } /** * @brief Create a serialized trie for N/A value matching, based on the options. 
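* * The serialized trie allows the device-side parser to match field text against all configured N/A strings in a single character-by-character traversal.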
*/ cudf::detail::trie create_na_trie(char quotechar, csv_reader_options const& reader_opts, rmm::cuda_stream_view stream) { // Default values to recognize as null values static std::vector<std::string> const default_na_values{"", "#N/A", "#N/A N/A", "#NA", "-1.#IND", "-1.#QNAN", "-NaN", "-nan", "1.#IND", "1.#QNAN", "<NA>", "N/A", "NA", "NULL", "NaN", "n/a", "nan", "null"}; if (!reader_opts.is_enabled_na_filter()) { return cudf::detail::trie(0, stream); } std::vector<std::string> na_values = reader_opts.get_na_values(); if (reader_opts.is_enabled_keep_default_na()) { na_values.insert(na_values.end(), default_na_values.begin(), default_na_values.end()); } // Pandas treats empty strings as N/A if empty fields are treated as N/A if (std::find(na_values.begin(), na_values.end(), "") != na_values.end()) { na_values.push_back(std::string(2, quotechar)); } return cudf::detail::create_serialized_trie(na_values, stream); } parse_options make_parse_options(csv_reader_options const& reader_opts, rmm::cuda_stream_view stream) { auto parse_opts = parse_options{}; if (reader_opts.is_enabled_delim_whitespace()) { parse_opts.delimiter = ' '; parse_opts.multi_delimiter = true; } else { parse_opts.delimiter = reader_opts.get_delimiter(); parse_opts.multi_delimiter = false; } parse_opts.terminator = reader_opts.get_lineterminator(); if (reader_opts.get_quotechar() != '\0' && reader_opts.get_quoting() != quote_style::NONE) { parse_opts.quotechar = reader_opts.get_quotechar(); parse_opts.keepquotes = false; parse_opts.doublequote = reader_opts.is_enabled_doublequote(); } else { parse_opts.quotechar = '\0'; parse_opts.keepquotes = true; parse_opts.doublequote = false; } parse_opts.skipblanklines = reader_opts.is_enabled_skip_blank_lines(); parse_opts.comment = reader_opts.get_comment(); parse_opts.dayfirst = reader_opts.is_enabled_dayfirst(); parse_opts.decimal = reader_opts.get_decimal(); parse_opts.thousands = reader_opts.get_thousands(); CUDF_EXPECTS(parse_opts.decimal != parse_opts.delimiter, "Decimal point cannot be the same as the delimiter"); CUDF_EXPECTS(parse_opts.thousands != parse_opts.delimiter, "Thousands separator cannot be the same as the delimiter"); // Handle user-defined true values, whereby field data is substituted with a // boolean true or numeric `1` value if (not reader_opts.get_true_values().empty()) { parse_opts.trie_true = cudf::detail::create_serialized_trie(reader_opts.get_true_values(), stream); } // Handle user-defined false values, whereby field data is substituted with a // boolean false or numeric `0` value if (not reader_opts.get_false_values().empty()) { parse_opts.trie_false = cudf::detail::create_serialized_trie(reader_opts.get_false_values(), stream); } // Handle user-defined N/A values, whereby field data is treated as null parse_opts.trie_na = create_na_trie(parse_opts.quotechar, reader_opts, stream); return parse_opts; } } // namespace table_with_metadata read_csv(std::unique_ptr<cudf::io::datasource>&& source, csv_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto parse_options = make_parse_options(options, stream); return read_csv(source.get(), options, parse_options, stream, mr); } } // namespace csv } // namespace detail } // namespace io } // namespace cudf
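// ---------------------------------------------------------------------------
// Minimal usage sketch of the public entry point implemented above
// (illustrative only; "data.csv" is a hypothetical input file):
//
//   auto opts = cudf::io::csv_reader_options::builder(
//                   cudf::io::source_info{"data.csv"})
//                   .header(0)
//                   .build();
//   cudf::io::table_with_metadata result = cudf::io::read_csv(opts);
// ---------------------------------------------------------------------------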
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/durations.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/strings/detail/convert/int_to_string.cuh> #include <cudf/strings/detail/strings_children.cuh> #include <cudf/strings/detail/utilities.cuh> #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <thrust/for_each.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/transform_iterator.h> namespace cudf { namespace io { namespace detail { namespace csv { namespace { // duration components timeparts structure struct alignas(4) duration_component { int32_t day; //-2,147,483,648 to 2,147,483,647 int32_t nanosecond; // 000000000 to 999999999 int8_t hour; // 00 to 23 int8_t minute; // 00 to 59 int8_t second; // 00 to 59 bool is_negative; // true/false }; template <typename T> __device__ void dissect_duration(T duration, duration_component* timeparts) { timeparts->is_negative = (duration < T{0}); timeparts->day = cuda::std::chrono::floor<duration_D>(duration).count(); if (cuda::std::is_same_v<T, duration_D>) return; // adjust for pandas format if (timeparts->is_negative) { duration = cuda::std::chrono::duration_cast<T>(duration % duration_D(1) + cuda::std::chrono::hours(24)); } duration_s seconds = cuda::std::chrono::duration_cast<duration_s>(duration); timeparts->hour = (cuda::std::chrono::duration_cast<cuda::std::chrono::hours>(seconds) % duration_D(1)).count(); timeparts->minute = (cuda::std::chrono::duration_cast<cuda::std::chrono::minutes>(seconds) % cuda::std::chrono::hours(1)) .count(); timeparts->second = (seconds % cuda::std::chrono::minutes(1)).count(); if (not cuda::std::is_same_v<T, duration_s>) { timeparts->nanosecond = (cuda::std::chrono::duration_cast<duration_ns>(duration) % duration_s(1)).count(); } } template <typename T> struct duration_to_string_size_fn { column_device_view const d_durations; __device__ size_type operator()(size_type idx) { if (d_durations.is_null(idx)) return 0; auto duration = d_durations.element<T>(idx); duration_component timeparts = {0}; // days, hours, minutes, seconds, nanoseconds(9) dissect_duration(duration, &timeparts); // [-] %d days [+]HH:MM:SS.mmmuuunnn return cudf::strings::detail::count_digits(timeparts.day) + 6 + timeparts.is_negative + 18; } }; template <typename T> struct duration_to_string_fn : public duration_to_string_size_fn<T> { int32_t const* d_offsets; char* d_chars; using duration_to_string_size_fn<T>::d_durations; duration_to_string_fn(column_device_view const d_durations, int32_t const* d_offsets, char* d_chars) : duration_to_string_size_fn<T>{d_durations}, d_offsets(d_offsets), d_chars(d_chars) { } __device__ char* int_to_2digitstr(int8_t value, char* str) { assert(value >= -99 && value <= 99); value = std::abs(value); str[0] = '0' + value / 10; str[1] = '0' + value % 10; return str + 2; } inline __device__ char* day(char* ptr, 
duration_component const* timeparts) { cudf::strings::detail::integer_to_string(timeparts->day, ptr); return (ptr + cudf::strings::detail::count_digits(timeparts->day)); } inline __device__ char* hour_24(char* ptr, duration_component const* timeparts) { return int_to_2digitstr(timeparts->hour, ptr); } inline __device__ char* minute(char* ptr, duration_component const* timeparts) { return int_to_2digitstr(timeparts->minute, ptr); } inline __device__ char* second(char* ptr, duration_component const* timeparts) { return int_to_2digitstr(timeparts->second, ptr); } inline __device__ char* nanosecond(char* ptr, duration_component const* timeparts) { auto value = timeparts->nanosecond; *ptr = '.'; for (int idx = 9; idx > 0; idx--) { *(ptr + idx) = '0' + std::abs(value % 10); value /= 10; } return ptr + 10; } inline __device__ char* pandas_format(duration_component const* timeparts, char* ptr) { // if (timeparts->is_negative) *ptr++ = '-'; ptr = day(ptr, timeparts); ptr = cudf::strings::detail::copy_and_increment(ptr, " days ", 6); if (timeparts->is_negative) *ptr++ = '+'; ptr = hour_24(ptr, timeparts); *ptr++ = ':'; ptr = minute(ptr, timeparts); *ptr++ = ':'; ptr = second(ptr, timeparts); return nanosecond(ptr, timeparts); } __device__ void operator()(size_type idx) { if (d_durations.is_null(idx)) return; auto duration = d_durations.template element<T>(idx); duration_component timeparts = {0}; // days, hours, minutes, seconds, nanoseconds(9) dissect_duration(duration, &timeparts); // convert to characters pandas_format(&timeparts, d_chars + d_offsets[idx]); } }; /** * @brief This dispatch method is for converting durations into strings. * * The template function declaration ensures only duration types are used. */ struct dispatch_from_durations_fn { template <typename T, std::enable_if_t<cudf::is_duration<T>()>* = nullptr> std::unique_ptr<column> operator()(column_view const& durations, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) const { size_type strings_count = durations.size(); auto column = column_device_view::create(durations, stream); auto d_column = *column; // copy null mask rmm::device_buffer null_mask = cudf::detail::copy_bitmask(durations, stream, mr); // build offsets column auto offsets_transformer_itr = thrust::make_transform_iterator( thrust::make_counting_iterator<int32_t>(0), duration_to_string_size_fn<T>{d_column}); auto [offsets_column, chars_bytes] = cudf::detail::make_offsets_child_column( offsets_transformer_itr, offsets_transformer_itr + strings_count, stream, mr); auto offsets_view = offsets_column->view(); auto d_new_offsets = offsets_view.template data<int32_t>(); // build chars column auto chars_column = strings::detail::create_chars_child_column(chars_bytes, stream, mr); auto chars_view = chars_column->mutable_view(); auto d_chars = chars_view.template data<char>(); thrust::for_each_n(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), strings_count, duration_to_string_fn<T>{d_column, d_new_offsets, d_chars}); // return make_strings_column(strings_count, std::move(offsets_column), std::move(chars_column), durations.null_count(), std::move(null_mask)); } // non-duration types throw an exception template <typename T, std::enable_if_t<not cudf::is_duration<T>()>* = nullptr> std::unique_ptr<column> operator()(column_view const&, rmm::cuda_stream_view, rmm::mr::device_memory_resource*) const { CUDF_FAIL("Values for from_durations function must be a duration type."); } }; } // namespace std::unique_ptr<column> 
pandas_format_durations(column_view const& durations, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { size_type strings_count = durations.size(); if (strings_count == 0) return make_empty_column(type_id::STRING); return type_dispatcher(durations.type(), dispatch_from_durations_fn{}, durations, stream, mr); } } // namespace csv } // namespace detail } // namespace io } // namespace cudf
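// ---------------------------------------------------------------------------
// Worked example of the pandas-style rendering produced above (illustrative):
//   a duration_s element of -43200 (i.e. -12 hours) dissects to
//     day = -1, hour = 12, minute = 0, second = 0
//   and renders as "-1 days +12:00:00.000000000"
// ---------------------------------------------------------------------------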
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/csv/datetime.cuh
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <io/utilities/parsing_utils.cuh> #include <io/utilities/time_utils.cuh> #include <cudf/fixed_point/fixed_point.hpp> #include <thrust/equal.h> #include <thrust/execution_policy.h> #include <thrust/find.h> #include <thrust/reduce.h> namespace cudf { namespace io { /** * @brief Parses non-negative integral values. * * This helper function is only intended to handle positive integers. The input * character string is expected to be well-formed. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return The parsed and converted value */ template <typename T> __inline__ __device__ T to_non_negative_integer(char const* begin, char const* end) { T value = 0; for (; begin < end; ++begin) { if (*begin >= '0' && *begin <= '9') { value *= 10; value += *begin - '0'; } } return value; } /** * @brief Extracts the Day, Month, and Year from a string. * * This function takes a string and produces a `year_month_day` representation. * Acceptable formats are a combination of `YYYY`, `M`, `MM`, `D` and `DD` with * `/` or `-` as separators. Data with only year and month (no day) is also valid. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param dayfirst Flag indicating that the first field is the day * @return Extracted year, month and day in `cuda::std::chrono::year_month_day` format */ __inline__ __device__ cuda::std::chrono::year_month_day extract_date(char const* begin, char const* end, bool dayfirst) { using namespace cuda::std::chrono; char sep = '/'; auto sep_pos = thrust::find(thrust::seq, begin, end, sep); if (sep_pos == end) { sep = '-'; sep_pos = thrust::find(thrust::seq, begin, end, sep); } year y; month m; day d; //--- is year the first field? 
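// e.g. in "2020-05-07" the first separator is at offset 4, so the leading field
// is parsed as the year; "2020-05" (year and month only) defaults the day to 1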
if ((sep_pos - begin) == 4) { y = year{to_non_negative_integer<int32_t>(begin, sep_pos)}; // year is signed // Month auto s2 = sep_pos + 1; sep_pos = thrust::find(thrust::seq, s2, end, sep); if (sep_pos == end) { //--- Data is just Year and Month - no day m = month{to_non_negative_integer<uint32_t>(s2, end)}; // month and day are unsigned d = day{1}; } else { m = month{to_non_negative_integer<uint32_t>(s2, sep_pos)}; d = day{to_non_negative_integer<uint32_t>((sep_pos + 1), end)}; } } else { //--- if the dayfirst flag is set, then restricts the format options if (dayfirst) { d = day{to_non_negative_integer<uint32_t>(begin, sep_pos)}; auto s2 = sep_pos + 1; sep_pos = thrust::find(thrust::seq, s2, end, sep); m = month{to_non_negative_integer<uint32_t>(s2, sep_pos)}; y = year{to_non_negative_integer<int32_t>((sep_pos + 1), end)}; } else { m = month{to_non_negative_integer<uint32_t>(begin, sep_pos)}; auto s2 = sep_pos + 1; sep_pos = thrust::find(thrust::seq, s2, end, sep); if (sep_pos == end) { //--- Data is just Year and Month - no day y = year{to_non_negative_integer<int32_t>(s2, end)}; d = day{1}; } else { d = day{to_non_negative_integer<uint32_t>(s2, sep_pos)}; y = year{to_non_negative_integer<int32_t>((sep_pos + 1), end)}; } } } return year_month_day{y, m, d}; } /** * @brief Parses a string to extract the hour, minute, second and millisecond time field * values of a day. * * Incoming format is expected to be `HH:MM:SS.MS`, with the latter second and millisecond fields * optional. Each time field can be a single, double, or triple (in the case of milliseconds) * digits. 12-hr and 24-hr time format is detected via the absence or presence of AM/PM characters * at the end. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return Extracted hours, minutes, seconds and milliseconds of `chrono::hh_mm_ss` type with a * precision of milliseconds */ __inline__ __device__ cuda::std::chrono::hh_mm_ss<duration_ms> extract_time_of_day( char const* begin, char const* end) { constexpr char sep = ':'; // Adjust for AM/PM and any whitespace before duration_h d_h{0}; auto last = end - 1; if (*last == 'M' || *last == 'm') { if (*(last - 1) == 'P' || *(last - 1) == 'p') { d_h = duration_h{12}; } last = last - 2; while (*last == ' ') { --last; } } end = last + 1; // Find hour-minute separator auto const hm_sep = thrust::find(thrust::seq, begin, end, sep); // Extract hours d_h += cudf::duration_h{to_non_negative_integer<int>(begin, hm_sep)}; duration_m d_m{0}; duration_s d_s{0}; duration_ms d_ms{0}; // Find minute-second separator (if present) auto const ms_sep = thrust::find(thrust::seq, hm_sep + 1, end, sep); if (ms_sep == end) { d_m = duration_m{to_non_negative_integer<int32_t>(hm_sep + 1, end)}; } else { d_m = duration_m{to_non_negative_integer<int32_t>(hm_sep + 1, ms_sep)}; // Find second-millisecond separator (if present) auto const sms_sep = thrust::find(thrust::seq, ms_sep + 1, end, '.'); if (sms_sep == end) { d_s = duration_s{to_non_negative_integer<int64_t>(ms_sep + 1, end)}; } else { d_s = duration_s{to_non_negative_integer<int64_t>(ms_sep + 1, sms_sep)}; d_ms = duration_ms{to_non_negative_integer<int64_t>(sms_sep + 1, end)}; } } return cuda::std::chrono::hh_mm_ss<duration_ms>{d_h + d_m + d_s + d_ms}; } /** * @brief Checks whether `c` is decimal digit */ constexpr bool is_digit(char c) { return c >= '0' and c <= '9'; } /** * @brief Parses a datetime string and computes the corresponding timestamp. 
* * Acceptable date formats are a combination of `YYYY`, `M`, `MM`, `D` and `DD` with `/` or `-` as * separators. Input with only year and month (no day) is also valid. Character `T` or blank space * is expected to be the separator between date and time of day. Optional time of day information * like hours, minutes, seconds and milliseconds are expected to be `HH:MM:SS.MS`. Each time field * can be a single, double, or triple (in the case of milliseconds) digits. 12-hr and 24-hr time * format is detected via the absence or presence of AM/PM characters at the end. * * @tparam timestamp_type Type of output timestamp * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param dayfirst Flag to indicate day/month or month/day order * @return Timestamp converted to `timestamp_type` */ template <typename timestamp_type> __inline__ __device__ timestamp_type to_timestamp(char const* begin, char const* end, bool dayfirst) { using duration_type = typename timestamp_type::duration; auto sep_pos = end; // Find end of the date portion int count = 0; bool digits_only = true; for (auto i = begin; i < end; ++i) { digits_only = digits_only and is_digit(*i); if (*i == 'T') { sep_pos = i; break; } else if (count == 3 && *i == ' ') { sep_pos = i; break; } else if ((*i == '/' || *i == '-') || (count == 2 && *i != ' ')) { count++; } } // Exit if the input string is digit-only if (digits_only) { return timestamp_type{ duration_type{to_non_negative_integer<typename timestamp_type::rep>(begin, end)}}; } auto ymd = extract_date(begin, sep_pos, dayfirst); timestamp_type answer{cuda::std::chrono::sys_days{ymd}}; // Extract time only if separator is present if (sep_pos != end) { auto t = extract_time_of_day(sep_pos + 1, end); answer += cuda::std::chrono::duration_cast<duration_type>(t.to_duration()); } return answer; } /** * @brief Parses the input string into an integral value of the given type. * * Moves the `begin` iterator past the parsed value. * * @param[in, out] begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return The parsed and converted value */ template <typename T> __inline__ __device__ T parse_integer(char const** begin, char const* end) { bool const is_negative = (**begin == '-'); T value = 0; auto cur = *begin + is_negative; while (cur < end) { if (*cur >= '0' && *cur <= '9') { value *= 10; value += *cur - '0'; } else break; ++cur; } *begin = cur; return is_negative ? -value : value; } /** * @brief Parses the input string into an integral value of the given type if the delimiter is * present. * * Moves the `begin` iterator past the parsed value. * * @param[in, out] begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param delimiter delimiter character * @return The parsed and converted value, zero is delimiter is not present */ template <typename T> __inline__ __device__ T parse_optional_integer(char const** begin, char const* end, char delimiter) { if (**begin != delimiter) { return 0; } ++(*begin); return parse_integer<T>(begin, end); } /** * @brief Finds the first element after the leading space characters. 
 * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return Pointer to the first character excluding any leading spaces */ __inline__ __device__ auto skip_spaces(char const* begin, char const* end) { return thrust::find_if(thrust::seq, begin, end, [](auto elem) { return elem != ' '; }); } /** * @brief Excludes the prefix from the input range if the string starts with the prefix. * * @tparam N Length of the prefix, plus one * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param prefix String we're searching for at the start of the input range * @return Pointer to the start of the string excluding the prefix */ template <int N> __inline__ __device__ auto skip_if_starts_with(char const* begin, char const* end, char const (&prefix)[N]) { static constexpr size_t prefix_len = N - 1; if (end - begin < prefix_len) return begin; return thrust::equal(thrust::seq, begin, begin + prefix_len, prefix) ? begin + prefix_len : begin; } /** * @brief Parses the input string into a duration of `duration_type`. * * The expected format can be one of the following: `DD days`, `DD days +HH:MM:SS.NS`, `DD days * HH:MM:SS.NS`, `HH:MM:SS.NS`, or a digits-only string. Note that `DD` and the optional `NS` field can * contain an arbitrary number of digits, while `HH`, `MM` and `SS` can be single or double digits. * * @tparam duration_type Type of the parsed duration * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return The parsed duration in `duration_type` */ template <typename duration_type> __inline__ __device__ duration_type to_duration(char const* begin, char const* end) { using cuda::std::chrono::duration_cast; // %d days [+]%H:%M:%S.n => %d days, %d days [+]%H:%M:%S, %H:%M:%S.n, %H:%M:%S, %value. constexpr char sep = ':'; // single pass to parse days, hour, minute, seconds, nanosecond auto cur = begin; auto const value = parse_integer<int32_t>(&cur, end); cur = skip_spaces(cur, end); if (std::is_same_v<duration_type, cudf::duration_D> || cur >= end) { return duration_type{static_cast<typename duration_type::rep>(value)}; } // " days [+]" auto const after_days_sep = skip_if_starts_with(cur, end, "days"); auto const has_days_separator = (after_days_sep != cur); cur = skip_spaces(after_days_sep, end); cur += (*cur == '+'); duration_D d_d{0}; duration_h d_h{0}; if (has_days_separator) { d_d = duration_D{value}; d_h = duration_h{parse_integer<int32_t>(&cur, end)}; } else { d_h = duration_h{value}; } duration_m d_m{parse_optional_integer<int32_t>(&cur, end, sep)}; duration_s d_s{parse_optional_integer<int64_t>(&cur, end, sep)}; // Convert all durations to the given type auto output_d = duration_cast<duration_type>(d_d + d_h + d_m + d_s); if constexpr (std::is_same_v<duration_type, cudf::duration_s>) { return output_d; } auto const d_ns = (*cur != '.') ? duration_ns{0} : [&]() { auto const start_subsecond = ++cur; auto const unscaled_subseconds = parse_integer<int64_t>(&cur, end); auto const scale = min(9L, cur - start_subsecond) - 9; auto const rescaled = numeric::decimal64{unscaled_subseconds, numeric::scale_type{scale}}; return duration_ns{rescaled.value()}; }(); return output_d + duration_cast<duration_type>(d_ns); } } // namespace io } // namespace cudf
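A note on the subsecond handling in `to_duration` above: the digits after the '.' are parsed as one integer and then rescaled to nanoseconds, so ".5" becomes 500000000 ns and ".123456789" stays 123456789 ns. Below is a minimal, host-only sketch of that rescaling; the function name `subseconds_to_ns` is hypothetical, and the real code routes through `numeric::decimal64` as shown above.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Hypothetical standalone rescaling: `digits` fractional digits were parsed
// into `unscaled`; convert to nanoseconds. Mirrors scale = min(9, digits) - 9.
int64_t subseconds_to_ns(int64_t unscaled, int64_t digits)
{
  auto scale = std::min<int64_t>(9, digits) - 9;  // always <= 0
  for (; scale < 0; ++scale) { unscaled *= 10; }  // multiply by 10^(9 - digits)
  return unscaled;
}

int main()
{
  std::printf("%lld\n", static_cast<long long>(subseconds_to_ns(5, 1)));          // 500000000
  std::printf("%lld\n", static_cast<long long>(subseconds_to_ns(123456789, 9)));  // 123456789
  return 0;
}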
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/avro.cpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "avro.hpp" #include <cstring> #include <unordered_map> namespace cudf { namespace io { namespace avro { template <> uint64_t container::get_encoded() { uint64_t val = 0; for (auto len = 0; len < 64; len += 7) { // 64-bit int since shift left is upto 64. uint64_t const byte = get_raw<uint8_t>(); val |= (byte & 0x7f) << len; if (byte < 0x80) break; } return val; } template <> int64_t container::get_encoded() { auto const uval = get_encoded<uint64_t>(); return (int64_t)((uval >> 1u) ^ -(int64_t)(uval & 1)); } template <> std::string container::get_encoded() { auto const len = [&] { auto const len = get_encoded<uint64_t>(); return (len & 1) || (m_cur >= m_end) ? 0 : std::min(len >> 1, static_cast<uint64_t>(m_end - m_cur)); }(); auto const s = reinterpret_cast<char const*>(m_cur); m_cur += len; return std::string(s, len); } /** * @brief AVRO file metadata parser * * @param[out] md parsed avro file metadata * @param[in] max_num_rows maximum number of rows * @param[in] first_row drop blocks below first_row * * @returns true if successful, false if error */ bool container::parse(file_metadata* md, size_t max_num_rows, size_t first_row) { constexpr uint32_t avro_magic = (('O' << 0) | ('b' << 8) | ('j' << 16) | (0x01 << 24)); uint32_t sig4 = get_raw<uint8_t>(); sig4 |= get_raw<uint8_t>() << 8; sig4 |= get_raw<uint8_t>() << 16; sig4 |= get_raw<uint8_t>() << 24; if (sig4 != avro_magic) { return false; } for (;;) { auto num_md_items = static_cast<uint32_t>(get_encoded<int64_t>()); if (num_md_items == 0) { break; } for (uint32_t i = 0; i < num_md_items; i++) { auto const key = get_encoded<std::string>(); auto const value = get_encoded<std::string>(); if (key == "avro.codec") { md->codec = value; } else if (key == "avro.schema") { schema_parser sp; if (!sp.parse(md->schema, value)) { return false; } } else { // printf("\"%s\" = \"%s\"\n", key.c_str(), value.c_str()); md->user_data.emplace(key, value); } } } // Save the first sync markers in the metadata; we compare them to other // sync markers that should be present at the end of a block. If they // differ, the data should be interpreted as corrupted. md->sync_marker[0] = get_raw<uint64_t>(); md->sync_marker[1] = get_raw<uint64_t>(); // Initialize remaining metadata fields. md->metadata_size = m_cur - m_base; md->skip_rows = first_row; md->total_num_rows = 0; // Enumerate the blocks in this file. Each block starts with a count of // objects (rows) in the block (uint64_t), and then the total size in bytes // of the block (uint64_t). We walk each block and do the following: // 1. Capture the total number of rows present across all blocks. // 2. Add each block to the metadata's list of blocks. // 3. Handle the case where we've been asked to skip or limit rows. // 4. Verify sync markers at the end of each block. // // A row offset is also maintained, and added to each block. 
This reflects // the absolute offset that needs to be added to any given row in order to // get the row's index within the destination array. See `dst_row` in // `avro_decode_row()` for more information. // // N.B. "object" and "row" are used interchangeably here; "object" is // avro nomenclature, "row" is ours. // // N.B. If we're skipping rows, we ignore blocks (i.e. don't add them to // md->block_list) that precede the block containing the first row // we're interested in. // // Number of rows in the current block. uint32_t num_rows = 0; // Absolute row offset of the current block relative to all blocks selected by // the skip rows/limit rows constraints, if any. Otherwise, absolute row // offset relative to all blocks. uint32_t row_offset = 0; // Maximum block size in bytes encountered whilst processing all blocks // selected by the skip rows/limit rows constraints, if any. Otherwise, // maximum block size across all blocks. uint32_t max_block_size = 0; // Accumulates the total number of rows across all blocks selected by the skip // rows/limit rows constraints, if any. Otherwise, total number of rows across // all blocks. size_t total_object_count = 0; // N.B. The 18 below is (presumably) intended to account for the two 64-bit // object count and block size integers (16 bytes total), and then an // additional two bytes to represent the smallest possible row size. while (m_cur + 18 < m_end && total_object_count < max_num_rows) { auto const object_count = static_cast<uint32_t>(get_encoded<int64_t>()); auto const block_size = static_cast<uint32_t>(get_encoded<int64_t>()); auto const next_end = m_cur + block_size + 16; // Abort on terminal conditions. We keep these as separate lines instead of // combining them into a single if in order to facilitate setting specific // line breakpoints in the debugger. if (block_size <= 0) { return false; } if (object_count <= 0) { return false; } if (next_end > m_end) { return false; } // Update our total row count. This is only captured for information // purposes. md->total_num_rows += object_count; if (object_count <= first_row) { // We've been asked to skip rows, and we haven't yet reached our desired // number of rows to skip. Subtract this block's rows (`object_count`) // from the remaining rows to skip (`first_row`). Do not add this block // to our block list. first_row -= object_count; } else { // Either we weren't asked to skip rows, or we were, but we've already hit // our target number of rows to skip. Add this block to our block list. max_block_size = std::max(max_block_size, block_size); total_object_count += object_count; if (!md->block_list.size()) { // This is the first block, so add it to our list with the current value // of `first_row`, which will reflect the number of rows to skip *in // this block*. m_start = m_cur; total_object_count -= first_row; num_rows = total_object_count; CUDF_EXPECTS(row_offset == 0, "Invariant check failed: row_offset != 0"); if ((max_num_rows > 0) && (max_num_rows < total_object_count)) { num_rows = max_num_rows; } md->block_list.emplace_back(m_cur - m_base, block_size, row_offset, first_row, num_rows); first_row = 0; row_offset += num_rows; } else { // Not our first block; `first_row` should always be zero here. 
CUDF_EXPECTS(first_row == 0, "Invariant check failed: first_row != 0"); num_rows = object_count; if ((max_num_rows > 0) && (max_num_rows < total_object_count)) { num_rows -= (total_object_count - max_num_rows); } md->block_list.emplace_back(m_cur - m_base, block_size, row_offset, first_row, num_rows); row_offset += num_rows; } } m_cur += block_size; // Read the next sync markers and ensure they match the first ones we // encountered. If they don't, we have to assume the data is corrupted, // and thus, we terminate processing immediately. uint64_t const sync_marker[] = {get_raw<uint64_t>(), get_raw<uint64_t>()}; bool valid_sync_markers = ((sync_marker[0] == md->sync_marker[0]) && (sync_marker[1] == md->sync_marker[1])); if (!valid_sync_markers) { return false; } } md->max_block_size = max_block_size; // N.B. `total_object_count` has skip_rows applied to it at this point, i.e. // it represents the number of rows that will be returned *after* rows // have been skipped (if requested). if ((max_num_rows <= 0) || (max_num_rows > total_object_count)) { md->num_rows = total_object_count; } else { md->num_rows = max_num_rows; } md->total_data_size = m_cur - (m_base + md->metadata_size); CUDF_EXPECTS(m_cur > m_start, "Invariant check failed: `m_cur > m_start` is false."); md->selected_data_size = m_cur - m_start; // Extract columns for (size_t i = 0; i < md->schema.size(); i++) { type_kind_e kind = md->schema[i].kind; logicaltype_kind_e logical_kind = md->schema[i].logical_kind; bool is_supported_kind = ((kind > type_null) && (kind < type_record)); if (is_supported_logical_type(logical_kind) || is_supported_kind) { column_desc col; int parent_idx = md->schema[i].parent_idx; col.schema_data_idx = (int32_t)i; col.schema_null_idx = -1; col.parent_union_idx = -1; col.name = md->schema[i].name; if (parent_idx >= 0) { while (parent_idx >= 0) { if (md->schema[parent_idx].kind == type_union) { std::size_t pos = parent_idx + 1; for (int num_children = md->schema[parent_idx].num_children; num_children > 0; --num_children) { int skip = 1; if (pos == i) { // parent_idx will always be pointing to our immediate parent // union at this point. col.parent_union_idx = parent_idx; } else if (md->schema[pos].kind == type_null) { col.schema_null_idx = pos; break; } do { skip = skip + md->schema[pos].num_children - 1; pos++; } while (skip != 0); } } // We want to "inherit" the column name from our parent union's // name, as long as we're not dealing with the root (parent_idx == 0) // or array entries. 
if ((parent_idx != 0 && md->schema[parent_idx].kind != type_array) || col.name.length() == 0) { if (col.name.length() > 0) { col.name.insert(0, 1, '.'); } col.name.insert(0, md->schema[parent_idx].name); } parent_idx = md->schema[parent_idx].parent_idx; } } md->columns.emplace_back(std::move(col)); } } return true; } /** * @brief Parser state */ enum json_state_e { state_attrname = 0, state_attrcolon, state_attrvalue, state_attrvalue_last, state_nextattr, state_nextsymbol, }; enum attrtype_e { attrtype_none = -1, attrtype_type = 0, attrtype_name, attrtype_fields, attrtype_symbols, attrtype_items, attrtype_logicaltype, }; /** * @brief AVRO JSON schema parser * * @param[out] schema parsed avro schema * @param[in] json_str avro schema (JSON string) * * @returns true if successful, false if error */ bool schema_parser::parse(std::vector<schema_entry>& schema, std::string const& json_str) { // Empty schema if (json_str == "[]") return true; char depthbuf[MAX_SCHEMA_DEPTH]; int depth = 0, parent_idx = -1, entry_idx = -1; json_state_e state = state_attrname; std::string str; std::unordered_map<std::string, type_kind_e> const typenames = { {"null", type_null}, {"boolean", type_boolean}, {"int", type_int}, {"long", type_long}, {"float", type_float}, {"double", type_double}, {"bytes", type_bytes}, {"string", type_string}, {"record", type_record}, {"enum", type_enum}, {"array", type_array}, {"union", type_union}, {"fixed", type_fixed}, {"decimal", type_decimal}, {"date", type_date}, {"time-millis", type_time_millis}, {"time-micros", type_time_micros}, {"timestamp-millis", type_timestamp_millis}, {"timestamp-micros", type_timestamp_micros}, {"local-timestamp-millis", type_local_timestamp_millis}, {"local-timestamp-micros", type_local_timestamp_micros}, {"duration", type_duration}}; std::unordered_map<std::string, attrtype_e> const attrnames = { {"type", attrtype_type}, {"name", attrtype_name}, {"fields", attrtype_fields}, {"symbols", attrtype_symbols}, {"items", attrtype_items}, {"logicalType", attrtype_logicaltype}}; attrtype_e cur_attr = attrtype_none; m_base = json_str.c_str(); m_cur = m_base; m_end = m_base + json_str.length(); while (more_data()) { int c = *m_cur++; switch (c) { case '"': str = get_str(); // printf("str: \"%s\" (cur_attr=%d, state=%d)\n", str.c_str(), cur_attr, state); if (state == state_attrname && cur_attr == attrtype_none && typenames.find(str) != typenames.end()) { cur_attr = attrtype_type; state = state_attrvalue_last; } if (state == state_attrname) { auto t = attrnames.find(str); cur_attr = (t == attrnames.end()) ? 
attrtype_none : t->second; state = state_attrcolon; } else if (state == state_attrvalue || state == state_attrvalue_last) { if (entry_idx < 0) { entry_idx = static_cast<int>(schema.size()); schema.emplace_back(type_not_set, parent_idx); if (parent_idx >= 0) { schema[parent_idx].num_children++; } } if (cur_attr == attrtype_type) { auto t = typenames.find(str); if (t == typenames.end()) return false; schema[entry_idx].kind = t->second; } else if (cur_attr == attrtype_logicaltype) { auto t = typenames.find(str); if (t == typenames.end()) return false; schema[entry_idx].logical_kind = static_cast<logicaltype_kind_e>(t->second); } else if (cur_attr == attrtype_name) { if (entry_idx < 0) return false; schema[entry_idx].name = std::move(str); } if (state == state_attrvalue_last) { entry_idx = -1; } state = state_nextattr; cur_attr = attrtype_none; } else if (state == state_nextsymbol) { if (entry_idx < 0) return false; schema[entry_idx].symbols.emplace_back(std::move(str)); } break; case ':': if (state != state_attrcolon) return false; state = state_attrvalue; break; case ',': if (state != state_nextsymbol) { if (state != state_nextattr) return false; state = state_attrname; } break; case '{': if (state == state_attrvalue && cur_attr == attrtype_type) { if (entry_idx < 0) { entry_idx = static_cast<int>(schema.size()); schema.emplace_back(type_record, parent_idx); if (parent_idx >= 0) { schema[parent_idx].num_children++; } } cur_attr = attrtype_none; state = state_attrname; } else if (state == state_attrvalue && cur_attr == attrtype_items && entry_idx >= 0) { // Treat array as a one-field record parent_idx = entry_idx; entry_idx = -1; cur_attr = attrtype_none; state = state_attrname; } if (depth >= MAX_SCHEMA_DEPTH || state != state_attrname) { return false; } depthbuf[depth++] = '{'; break; case '}': if (depth == 0 || state != state_nextattr || depthbuf[depth - 1] != '{') return false; --depth; if (entry_idx < 0) { parent_idx = (parent_idx >= 0) ? 
schema[parent_idx].parent_idx : -1; } else { entry_idx = -1; } break; case '[': if (state == state_attrname && cur_attr == attrtype_none) { cur_attr = attrtype_type; state = state_attrvalue; } if (depth >= MAX_SCHEMA_DEPTH || state != state_attrvalue) { return false; } depthbuf[depth++] = '['; if (cur_attr == attrtype_symbols) { state = state_nextsymbol; break; } else if (cur_attr == attrtype_type) { if (entry_idx < 0 || schema[entry_idx].kind != type_not_set) { entry_idx = static_cast<int>(schema.size()); schema.emplace_back(type_union, parent_idx); if (parent_idx >= 0) { schema[parent_idx].num_children++; } } else { schema[entry_idx].kind = type_union; } parent_idx = entry_idx; } else if (cur_attr != attrtype_fields || entry_idx < 0 || schema[entry_idx].kind < type_record) { return false; } else { parent_idx = entry_idx; } entry_idx = -1; cur_attr = attrtype_none; state = state_attrname; break; case ']': if (depth == 0 || (state != state_nextattr && state != state_nextsymbol) || depthbuf[depth - 1] != '[') return false; --depth; if (state == state_nextsymbol) { state = state_nextattr; } else if (parent_idx >= 0) { entry_idx = parent_idx; parent_idx = schema[parent_idx].parent_idx; } break; case ' ': case '\x09': case '\x0d': case '\x0a': // Ignore spaces, tabs and CRLF break; default: return false; } } // printf("schema (%d entries) = %s\n", (int)schema.size(), m_base); return true; } /** * @brief Parse a string * * @returns parsed string, consuming the terminating quote */ std::string schema_parser::get_str() { std::string s; char const* start = m_cur; char const* cur = start; while (cur < m_end && *cur++ != '"') ; int32_t len = static_cast<int32_t>(cur - start - 1); m_cur = cur; return s.assign(start, std::max(len, 0)); } } // namespace avro } // namespace io } // namespace cudf
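The `get_encoded` specializations above implement Avro's zigzag varint encoding: each byte carries 7 data bits with the high bit as a continuation flag, and the sign is folded into the least significant bit. A self-contained host sketch of the same decode (the function name is hypothetical):

#include <cstdint>
#include <cstdio>

// Decode one zigzag varint from [cur, end); advances `cur` past the value.
int64_t decode_zigzag_varint(uint8_t const*& cur, uint8_t const* end)
{
  uint64_t u = 0;
  for (int shift = 0; shift < 64 && cur < end; shift += 7) {
    uint64_t const byte = *cur++;
    u |= (byte & 0x7f) << shift;  // low 7 bits carry data
    if (byte < 0x80) break;       // high bit clear: this was the last byte
  }
  // Undo zigzag: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...
  return static_cast<int64_t>((u >> 1) ^ -static_cast<int64_t>(u & 1));
}

int main()
{
  uint8_t const buf[] = {0x03};  // zigzag encoding of -2
  uint8_t const* p   = buf;
  std::printf("%lld\n", static_cast<long long>(decode_zigzag_varint(p, buf + 1)));  // prints -2
  return 0;
}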
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/avro_gpu.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "avro_gpu.hpp" #include <io/utilities/block_utils.cuh> #include <rmm/cuda_stream_view.hpp> using cudf::device_span; namespace cudf { namespace io { namespace avro { namespace gpu { constexpr int num_warps = 16; constexpr int max_shared_schema_len = 1000; /* * Avro varint encoding - see * https://avro.apache.org/docs/1.2.0/spec.html#binary_encoding */ static inline int64_t __device__ avro_decode_zigzag_varint(uint8_t const*& cur, uint8_t const* end) { uint64_t u = 0; if (cur < end) { u = *cur++; if (u > 0x7f) { uint64_t scale = 128; u &= 0x7f; while (cur < end) { uint32_t c = *cur++; u += (c & 0x7f) * scale; scale <<= 7; if (c < 0x80) break; } } } return (int64_t)((u >> 1u) ^ -(int64_t)(u & 1)); } /** * @brief Decode a row of values given an avro schema * * @param[in] schema Schema description * @param[in] schema_g Global schema in device mem * @param[in] schema_len Number of schema entries * @param[in] first_row First row to start saving decoded data * @param[in] row Current row * @param[in] end_row One past the last row to save * @param[in] row_offset Absolute row offset of this row in the * destination data. * @param[in] cur Current input data pointer * @param[in] end End of input data * @param[in] global_Dictionary Global dictionary entries * @param[out] skipped_row Whether the row was skipped; set to false * if the row was saved (caller should ensure * this is initialized to true) * * @return data pointer at the end of the row (start of next row) */ static uint8_t const* __device__ avro_decode_row(schemadesc_s const* schema, schemadesc_s* schema_g, uint32_t schema_len, size_t first_row, size_t row, size_t end_row, size_t row_offset, uint8_t const* cur, uint8_t const* end, device_span<string_index_pair const> global_dictionary, bool* skipped_row) { // `dst_row` depicts the offset of the decoded row in the destination // `dataptr` array, adjusted for skip rows, if applicable. For example, // if `row` == 5 and `first_row` == 3, then this is the second row we'll // be storing (5-3). If `first_row` is greater than `row`, this routine // simply decodes the row and adjusts the returned data pointer, but does // *not* actually store the row in the destination `dataptr` array. This // is enforced by all writes to the destination memory being guarded in the // following fashion: // if (dataptr != nullptr && dst_row > 0) { // static_cast<int32_t*>(dataptr)[dst_row] = static_cast<int32_t>(v); // *skipped_row = false; // } // The actual value is calculated by subtracting the first row from this given // row value, and then adding the absolute row offset. The row offset is // required to ensure we write to the correct destination location when we're // processing multiple blocks, i.e. this block could only have 10 rows, but // it's the 3rd block (where each block has 10 rows), so we need to write to // the 30th row in the destination array. ptrdiff_t const dst_row = (row >= first_row && row < end_row ? 
static_cast<ptrdiff_t>((row - first_row) + row_offset) : -1); // Critical invariant checks: dst_row should be -1 or greater, and // *skipped_row should always be true at this point (we set it to false only // if we write the decoded value to the destination array). if (dst_row < -1) { CUDF_UNREACHABLE("dst_row should be -1 or greater"); } if (*skipped_row != true) { CUDF_UNREACHABLE("skipped_row should be true"); } uint32_t array_start = 0, array_repeat_count = 0; int array_children = 0; for (uint32_t i = 0; i < schema_len;) { type_kind_e kind = schema[i].kind; logicaltype_kind_e logical_kind = schema[i].logical_kind; int skip = 0; if (is_supported_logical_type(logical_kind)) { kind = static_cast<type_kind_e>(logical_kind); } if (kind == type_union) { int skip_after; if (cur >= end) break; skip = (*cur++) >> 1; // NOTE: Assumes 1-byte union member skip_after = schema[i].count - skip - 1; ++i; while (skip > 0 && i < schema_len) { if (schema[i].kind >= type_record) { skip += schema[i].count; } ++i; --skip; } if (i >= schema_len || skip_after < 0) break; kind = schema[i].kind; logical_kind = schema[i].logical_kind; if (is_supported_logical_type(logical_kind)) { kind = static_cast<type_kind_e>(logical_kind); } skip = skip_after; } void* dataptr = schema[i].dataptr; switch (kind) { case type_null: if (dataptr != nullptr && dst_row >= 0) { atomicAnd(static_cast<uint32_t*>(dataptr) + (dst_row >> 5), ~(1 << (dst_row & 0x1f))); atomicAdd(&schema_g[i].count, 1); *skipped_row = false; } break; case type_int: { int64_t v = avro_decode_zigzag_varint(cur, end); if (dataptr != nullptr && dst_row >= 0) { static_cast<int32_t*>(dataptr)[dst_row] = static_cast<int32_t>(v); *skipped_row = false; } } break; case type_long: { int64_t v = avro_decode_zigzag_varint(cur, end); if (dataptr != nullptr && dst_row >= 0) { static_cast<int64_t*>(dataptr)[dst_row] = v; *skipped_row = false; } } break; case type_bytes: [[fallthrough]]; case type_string: [[fallthrough]]; case type_enum: { int64_t v = avro_decode_zigzag_varint(cur, end); size_t count = 0; char const* ptr = nullptr; if (kind == type_enum) { // dictionary size_t idx = schema[i].count + v; if (idx < global_dictionary.size()) { ptr = global_dictionary[idx].first; count = global_dictionary[idx].second; } } else if (v >= 0 && cur + v <= end) { // string or bytes ptr = reinterpret_cast<char const*>(cur); count = (size_t)v; cur += count; } if (dataptr != nullptr && dst_row >= 0) { static_cast<string_index_pair*>(dataptr)[dst_row].first = ptr; static_cast<string_index_pair*>(dataptr)[dst_row].second = count; *skipped_row = false; } } break; case type_float: if (dataptr != nullptr && dst_row >= 0) { uint32_t v; if (cur + 3 < end) { v = unaligned_load32(cur); cur += 4; } else { v = 0; } static_cast<uint32_t*>(dataptr)[dst_row] = v; *skipped_row = false; } else { cur += 4; } break; case type_double: if (dataptr != nullptr && dst_row >= 0) { uint64_t v; if (cur + 7 < end) { v = unaligned_load64(cur); cur += 8; } else { v = 0; } static_cast<uint64_t*>(dataptr)[dst_row] = v; *skipped_row = false; } else { cur += 8; } break; case type_boolean: if (dataptr != nullptr && dst_row >= 0) { uint8_t v = (cur < end) ? *cur : 0; static_cast<uint8_t*>(dataptr)[dst_row] = (v) ? 
1 : 0; *skipped_row = false; } cur++; break; case type_array: { int32_t array_block_count = avro_decode_zigzag_varint(cur, end); if (array_block_count < 0) { avro_decode_zigzag_varint(cur, end); // block size in bytes, ignored array_block_count = -array_block_count; } array_start = i; array_repeat_count = array_block_count; array_children = 1; if (array_repeat_count == 0) { skip += schema[i].count; // Should always be 1 } } break; case type_duration: { // A duration logical type annotates Avro fixed type of size 12, which // stores three little-endian unsigned integers that represent durations // at different granularities of time. The first stores a number in // months, the second stores a number in days, and the third stores a // number in milliseconds. CUDF_UNREACHABLE("avro type 'duration' not yet implemented"); } break; // N.B. These aren't handled yet, see the discussion on // https://github.com/rapidsai/cudf/pull/12788. The decoding logic // is correct, though, so there's no harm in having them here. case type_timestamp_millis: [[fallthrough]]; case type_timestamp_micros: [[fallthrough]]; case type_local_timestamp_millis: [[fallthrough]]; case type_local_timestamp_micros: [[fallthrough]]; case type_time_millis: [[fallthrough]]; case type_time_micros: { // N.B. time-millis is stored as a 32-bit int, however, cudf expects an // int64 for DURATION_MILLISECONDS. From our perspective, the fact // that time-millis comes from a 32-bit int is hidden from us by // way of the zig-zag varint encoding, so we can safely treat them // both as int64_t. Everything else is 64-bit in both avro and // cudf. CUDF_UNREACHABLE("avro time/timestamp types not yet implemented"); // // When we do implement these, the following decoding logic should // be correct: // // int64_t v = avro_decode_zigzag_varint(cur, end); // if (dataptr != nullptr && dst_row >= 0) { // static_cast<int64_t*>(dataptr)[dst_row] = v; // *skipped_row = false; // } } break; case type_date: { int64_t v = avro_decode_zigzag_varint(cur, end); if (dataptr != nullptr && dst_row >= 0) { static_cast<int32_t*>(dataptr)[dst_row] = static_cast<int32_t>(v); *skipped_row = false; } } break; } if (array_repeat_count != 0) { array_children--; if (schema[i].kind >= type_record) { array_children += schema[i].count; } } i++; while (skip > 0 && i < schema_len) { if (schema[i].kind >= type_record) { skip += schema[i].count; } ++i; --skip; } // If within an array, check if we reached the last item if (array_repeat_count != 0 && array_children <= 0 && cur < end) { if (!--array_repeat_count) { i = array_start; // Restart at the array parent } else { i = array_start + 1; // Restart after the array parent array_children = schema[array_start].count; } } } return cur; } /** * @brief Decode column data * * @param[in] blocks Data block descriptions * @param[in] schema Schema description * @param[in] global_Dictionary Global dictionary entries * @param[in] avro_data Raw block data * @param[in] schema_len Number of entries in schema * @param[in] min_row_size Minimum size in bytes of a row */ // blockDim {32,num_warps,1} __global__ void __launch_bounds__(num_warps * 32, 2) gpuDecodeAvroColumnData(device_span<block_desc_s const> blocks, schemadesc_s* schema_g, device_span<string_index_pair const> global_dictionary, uint8_t const* avro_data, uint32_t schema_len, uint32_t min_row_size) { __shared__ __align__(8) schemadesc_s g_shared_schema[max_shared_schema_len]; __shared__ __align__(8) block_desc_s blk_g[num_warps]; schemadesc_s* schema; block_desc_s* const blk = 
&blk_g[threadIdx.y]; uint32_t block_id = blockIdx.x * num_warps + threadIdx.y; // Fetch schema into shared mem if possible if (schema_len <= max_shared_schema_len) { for (int i = threadIdx.y * 32 + threadIdx.x; i < schema_len; i += num_warps * 32) { g_shared_schema[i] = schema_g[i]; } __syncthreads(); schema = g_shared_schema; } else { schema = schema_g; } if (block_id < blocks.size() and threadIdx.x == 0) { *blk = blocks[block_id]; } __syncthreads(); if (block_id >= blocks.size()) { return; } uint8_t const* cur = avro_data + blk->offset; uint8_t const* end = cur + blk->size; size_t first_row = blk->first_row + blk->row_offset; size_t cur_row = blk->row_offset; size_t end_row = first_row + blk->num_rows; uint32_t rows_remaining = blk->num_rows; while (cur < end) { uint32_t nrows; uint8_t const* start = cur; if (cur + min_row_size * rows_remaining == end) { // We're dealing with predictable fixed-size rows, which means we can // process up to 32 rows (warp-width) at a time. This will be the case // when we're dealing with fixed-size data, e.g. of floats or doubles, // which are always 4 or 8 bytes respectively. nrows = min(rows_remaining, 32); cur += threadIdx.x * min_row_size; } else { // We're dealing with variable-size data, so only one row can be processed // by one thread at a time. nrows = 1; } if (threadIdx.x < nrows) { bool skipped_row = true; cur = avro_decode_row(schema, schema_g, schema_len, first_row, cur_row + threadIdx.x, end_row, blk->row_offset, cur, end, global_dictionary, &skipped_row); if (!skipped_row) { rows_remaining -= nrows; } } __syncwarp(); cur_row += nrows; if (nrows == 1) { // Only lane 0 (i.e. 'threadIdx.x == 0') was active, so we need to // broadcast the new value of 'cur' and 'rows_remaining' to all other // threads in the warp. cur = start + shuffle(static_cast<uint32_t>(cur - start)); // rows_remaining is already uint32_t, so we don't need to do the // start + shuffle(this - start) dance like we do above. rows_remaining = shuffle(rows_remaining); } else if (nrows > 1) { cur = start + (nrows * min_row_size); } } } /** * @brief Launches kernel for decoding column data * * @param[in] blocks Data block descriptions * @param[in] schema Schema description * @param[in] global_dictionary Global dictionary entries * @param[in] avro_data Raw block data * @param[in] schema_len Number of entries in schema * @param[in] min_row_size Minimum size in bytes of a row * @param[in] stream CUDA stream to use */ void DecodeAvroColumnData(device_span<block_desc_s const> blocks, schemadesc_s* schema, device_span<string_index_pair const> global_dictionary, uint8_t const* avro_data, uint32_t schema_len, uint32_t min_row_size, rmm::cuda_stream_view stream) { // num_warps warps per threadblock dim3 const dim_block(32, num_warps); // 1 warp per datablock, num_warps datablocks per threadblock dim3 const dim_grid((blocks.size() + num_warps - 1) / num_warps, 1); gpuDecodeAvroColumnData<<<dim_grid, dim_block, 0, stream.value()>>>( blocks, schema, global_dictionary, avro_data, schema_len, min_row_size); } } // namespace gpu } // namespace avro } // namespace io } // namespace cudf
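The destination-row arithmetic in `avro_decode_row` deserves a concrete example. Suppose `skip_rows == 12` over three 10-row blocks: block 0 is dropped entirely, block 1 is kept with `first_row == 2` and `row_offset == 0`, and block 2 with `first_row == 0` and `row_offset == 8`. A tiny host sketch of the mapping each thread computes (the function name is hypothetical):

#include <cstdio>

// Map a row index to its output slot, or -1 if the row is skipped.
// Mirrors `dst_row` in avro_decode_row(): first_row/end_row bound the rows
// to save; row_offset places this block within the overall output.
long dst_row_for(long row, long first_row, long end_row, long row_offset)
{
  return (row >= first_row && row < end_row) ? (row - first_row) + row_offset : -1;
}

int main()
{
  // Block 1 of the example: rows 0..9, first_row = 2, end_row = 10, row_offset = 0.
  for (long row = 0; row < 10; ++row) {
    std::printf("row %ld -> dst %ld\n", row, dst_row_for(row, 2, 10, 0));  // -1, -1, 0, 1, ..., 7
  }
  return 0;
}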
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/avro_common.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <io/utilities/column_buffer.hpp> #include <cstdint> #include <cstdio> namespace cudf { namespace io { namespace avro { struct block_desc_s { block_desc_s() = default; // required to compile on ctk-12.2 + aarch64 explicit constexpr block_desc_s( size_t offset_, uint32_t size_, uint32_t row_offset_, uint32_t first_row_, uint32_t num_rows_) : offset(offset_), size(size_), row_offset(row_offset_), first_row(first_row_), num_rows(num_rows_) { } // Offset of this block, in bytes, from the start of the file. size_t offset; // Size of this block, in bytes. uint32_t size; // The absolute row offset that needs to be added to each row index in order // to derive the offset of the decoded data in the destination array. E.g. // `const ptrdiff_t dst_row = ((row - first_row) + row_offset)`. See // `avro_decode_row()` for details. uint32_t row_offset; // The index of the first row to be *saved* from this block. That is, the // number of rows to skip in this block before starting to save values. If // this is 0, then no rows will be skipped (all rows will be saved). If a // user has requested `read_avro()` to skip rows, that will materialize as a // non-zero `first_row` value in the appropriate block containing the first // row to be saved. // // N.B. We explicitly use the word "saved" here, not "decoded". Technically, // all rows are decoded, one column at a time, as the process of decoding // a column value is what informs us of the value's size in bytes (in its // encoded form), and thus, where the next column starts. However, we // only *save* these decoded values based on the `first_row`. uint32_t first_row; // The number of rows to save from this block. If a user has requested // `read_avro()` to limit the number of rows to return, this will materialize // as a `num_rows` value less than the total number of rows in the appropriate // block. Otherwise, `num_rows` will be equal to the total number of rows in // the block, after skipping `first_row` rows (if applicable). // // N.B. Unlike `first_rows`, where all rows and columns are decoded prior to // reaching the point we've been requested to start *saving* values -- // once the `num_rows` limit has been reached, no further decoding takes // place. uint32_t num_rows; }; enum type_kind_e { type_not_set = -1, // Primitive types type_null = 0, type_boolean, type_int, type_long, type_float, type_double, type_bytes, type_string, // Complex types type_enum, type_record, type_union, type_array, type_fixed, // Logical types type_decimal, type_uuid, type_date, type_time_millis, type_time_micros, type_timestamp_millis, type_timestamp_micros, type_local_timestamp_millis, type_local_timestamp_micros, type_duration, }; enum logicaltype_kind_e { logicaltype_not_set = 0, // N.B. 
We intentionally mirror the logicaltype enum values with their // equivalent type enum value, as this allows us to cast the type // value directly to a logical type without an intermediate // mapping step, and vice versa, e.g.: // // auto kind = type_date; // auto logical_kind = static_cast<logical_kind_e>(type_date); // // logical_kind == logicaltype_kind_e::logicaltype_date // // And: // // auto logical_kind = logicaltype_date; // auto kind = static_cast<type_kind_e>(logical_kind); // // kind == type_kind_e::type_date // logicaltype_decimal = type_decimal, logicaltype_uuid, logicaltype_date, logicaltype_time_millis, logicaltype_time_micros, logicaltype_timestamp_millis, logicaltype_timestamp_micros, logicaltype_local_timestamp_millis, logicaltype_local_timestamp_micros, logicaltype_duration, }; /** * @brief Determines if the supplied logical type is currently supported. * * @param[in] logical_kind Supplies the logicaltype_kind_e enum value. * * @return true if the logical type is supported, false otherwise. */ inline constexpr bool is_supported_logical_type(logicaltype_kind_e logical_kind) { switch (logical_kind) { case logicaltype_date: return true; case logicaltype_not_set: [[fallthrough]]; case logicaltype_decimal: [[fallthrough]]; case logicaltype_uuid: [[fallthrough]]; case logicaltype_time_millis: [[fallthrough]]; case logicaltype_time_micros: [[fallthrough]]; case logicaltype_timestamp_millis: [[fallthrough]]; case logicaltype_timestamp_micros: [[fallthrough]]; case logicaltype_local_timestamp_millis: [[fallthrough]]; case logicaltype_local_timestamp_micros: [[fallthrough]]; case logicaltype_duration: [[fallthrough]]; default: return false; } } using cudf::io::detail::string_index_pair; } // namespace avro } // namespace io } // namespace cudf
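Since `logicaltype_kind_e` deliberately mirrors the `type_kind_e` values starting at `type_decimal`, the `static_cast` round-trips described in the comment above are pure value reinterpretations. A compile-time sketch of how that invariant could be checked from a translation unit that includes avro_common.hpp (these asserts are illustrative, not part of the original source):

#include "avro_common.hpp"

namespace cudf::io::avro {
// Sketch: verify the type/logicaltype mirroring that the casts rely on.
static_assert(static_cast<int>(type_decimal) == static_cast<int>(logicaltype_decimal));
static_assert(static_cast<int>(type_date) == static_cast<int>(logicaltype_date));
static_assert(static_cast<int>(type_timestamp_micros) ==
              static_cast<int>(logicaltype_timestamp_micros));
static_assert(static_cast<int>(type_duration) == static_cast<int>(logicaltype_duration));
}  // namespace cudf::io::avro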
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/reader_impl.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "avro.hpp" #include "avro_gpu.hpp" #include <io/comp/gpuinflate.hpp> #include <io/utilities/column_buffer.hpp> #include <io/utilities/hostdevice_vector.hpp> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/io/datasource.hpp> #include <cudf/io/detail/avro.hpp> #include <cudf/table/table.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <cudf/utilities/traits.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_buffer.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/equal.h> #include <thrust/functional.h> #include <thrust/iterator/constant_iterator.h> #include <thrust/iterator/transform_output_iterator.h> #include <thrust/tabulate.h> #include <nvcomp/snappy.h> #include <memory> #include <numeric> #include <string> #include <utility> #include <vector> using cudf::device_span; namespace cudf { namespace io { namespace detail { namespace avro { // Import functionality that's independent of legacy code using namespace cudf::io::avro; using namespace cudf::io; namespace { /** * @brief Function that translates Avro data kind to cuDF type enum */ type_id to_type_id(avro::schema_entry const* col) { avro::type_kind_e kind; // N.B. The switch statement seems a bit ridiculous for a single type, but the // plan is to incrementally add more types to it as support is added for // them in the future. switch (col->logical_kind) { case avro::logicaltype_date: kind = static_cast<avro::type_kind_e>(col->logical_kind); break; case avro::logicaltype_not_set: [[fallthrough]]; default: kind = col->kind; break; } switch (kind) { case avro::type_boolean: return type_id::BOOL8; case avro::type_int: return type_id::INT32; case avro::type_long: return type_id::INT64; case avro::type_float: return type_id::FLOAT32; case avro::type_double: return type_id::FLOAT64; case avro::type_bytes: [[fallthrough]]; case avro::type_string: return type_id::STRING; case avro::type_date: return type_id::TIMESTAMP_DAYS; case avro::type_timestamp_millis: return type_id::TIMESTAMP_MILLISECONDS; case avro::type_timestamp_micros: return type_id::TIMESTAMP_MICROSECONDS; case avro::type_local_timestamp_millis: return type_id::TIMESTAMP_MILLISECONDS; case avro::type_local_timestamp_micros: return type_id::TIMESTAMP_MICROSECONDS; case avro::type_enum: return (!col->symbols.empty()) ? type_id::STRING : type_id::INT32; // The avro time-millis and time-micros types are closest to Arrow's // TIME32 and TIME64. They're single-day units, i.e. they won't exceed // 23:59:59.9999 (or .999999 for micros). There's no equivalent cudf // type for this; type_id::DURATION_MILLISECONDS/MICROSECONDS are close, // but they're not semantically the same. 
case avro::type_time_millis: [[fallthrough]]; case avro::type_time_micros: [[fallthrough]]; // There's no cudf equivalent for the avro duration type, which is a fixed // 12 byte value which stores three little-endian unsigned 32-bit integers // representing months, days, and milliseconds, respectively. case avro::type_duration: [[fallthrough]]; default: return type_id::EMPTY; } } } // namespace /** * @brief A helper wrapper for Avro file metadata. Provides some additional * convenience methods for initializing and accessing the metadata and schema */ class metadata : public file_metadata { public: explicit metadata(datasource* const src) : source(src) {} /** * @brief Initializes the parser and filters down to a subset of rows * * @param[in,out] row_start Starting row of the selection * @param[in,out] row_count Total number of rows selected */ void init_and_select_rows(size_type& row_start, size_type& row_count) { auto const buffer = source->host_read(0, source->size()); avro::container pod(buffer->data(), buffer->size()); CUDF_EXPECTS(pod.parse(this, row_count, row_start), "Cannot parse metadata"); row_start = skip_rows; row_count = num_rows; } /** * @brief Filters and reduces down to a selection of columns * * @param[in] use_names List of column names to select * * @return List of column names */ auto select_columns(std::vector<std::string> use_names) { std::vector<std::pair<int, std::string>> selection; auto const num_avro_columns = static_cast<int>(columns.size()); if (!use_names.empty()) { int index = 0; for (auto const& use_name : use_names) { for (int i = 0; i < num_avro_columns; ++i, ++index) { if (index >= num_avro_columns) { index = 0; } if (columns[index].name == use_name && type_id::EMPTY != to_type_id(&schema[columns[index].schema_data_idx])) { selection.emplace_back(index, columns[index].name); index++; break; } } } CUDF_EXPECTS(selection.size() > 0, "Filtered out all columns"); } else { for (int i = 0; i < num_avro_columns; ++i) { // Exclude array columns (unsupported) bool column_in_array = false; for (int parent_idx = schema[columns[i].schema_data_idx].parent_idx; parent_idx > 0; parent_idx = schema[parent_idx].parent_idx) { if (schema[parent_idx].kind == avro::type_array) { column_in_array = true; break; } } if (!column_in_array) { auto col_type = to_type_id(&schema[columns[i].schema_data_idx]); CUDF_EXPECTS(col_type != type_id::EMPTY, "Unsupported data type"); selection.emplace_back(i, columns[i].name); } } } return selection; } private: datasource* const source; }; rmm::device_buffer decompress_data(datasource& source, metadata& meta, rmm::device_buffer const& comp_block_data, rmm::cuda_stream_view stream) { if (meta.codec == "deflate") { auto inflate_in = cudf::detail::hostdevice_vector<device_span<uint8_t const>>(meta.block_list.size(), stream); auto inflate_out = cudf::detail::hostdevice_vector<device_span<uint8_t>>(meta.block_list.size(), stream); auto inflate_stats = cudf::detail::hostdevice_vector<compression_result>(meta.block_list.size(), stream); thrust::fill(rmm::exec_policy(stream), inflate_stats.d_begin(), inflate_stats.d_end(), compression_result{0, compression_status::FAILURE}); // Guess an initial maximum uncompressed block size. We estimate the compression factor is two // and round up to the next multiple of 4096 bytes. 
uint32_t const initial_blk_len = meta.max_block_size * 2 + (meta.max_block_size * 2) % 4096; size_t const uncomp_size = initial_blk_len * meta.block_list.size(); rmm::device_buffer decomp_block_data(uncomp_size, stream); auto const base_offset = meta.block_list[0].offset; for (size_t i = 0, dst_pos = 0; i < meta.block_list.size(); i++) { auto const src_pos = meta.block_list[i].offset - base_offset; inflate_in[i] = {static_cast<uint8_t const*>(comp_block_data.data()) + src_pos, meta.block_list[i].size}; inflate_out[i] = {static_cast<uint8_t*>(decomp_block_data.data()) + dst_pos, initial_blk_len}; // Update blocks offsets & sizes to refer to uncompressed data meta.block_list[i].offset = dst_pos; meta.block_list[i].size = static_cast<uint32_t>(inflate_out[i].size()); dst_pos += meta.block_list[i].size; } inflate_in.host_to_device_async(stream); for (int loop_cnt = 0; loop_cnt < 2; loop_cnt++) { inflate_out.host_to_device_async(stream); gpuinflate(inflate_in, inflate_out, inflate_stats, gzip_header_included::NO, stream); inflate_stats.device_to_host_sync(stream); // Check if larger output is required, as it's not known ahead of time if (loop_cnt == 0) { std::vector<size_t> actual_uncomp_sizes; actual_uncomp_sizes.reserve(inflate_out.size()); std::transform(inflate_out.begin(), inflate_out.end(), inflate_stats.begin(), std::back_inserter(actual_uncomp_sizes), [](auto const& inf_out, auto const& inf_stats) { // If error status is OUTPUT_OVERFLOW, the `bytes_written` field // actually contains the uncompressed data size return inf_stats.status == compression_status::OUTPUT_OVERFLOW ? std::max(inf_out.size(), inf_stats.bytes_written) : inf_out.size(); }); auto const total_actual_uncomp_size = std::accumulate(actual_uncomp_sizes.cbegin(), actual_uncomp_sizes.cend(), 0ul); if (total_actual_uncomp_size > uncomp_size) { decomp_block_data.resize(total_actual_uncomp_size, stream); for (size_t i = 0; i < meta.block_list.size(); ++i) { meta.block_list[i].offset = i > 0 ? (meta.block_list[i - 1].size + meta.block_list[i - 1].offset) : 0; meta.block_list[i].size = static_cast<uint32_t>(actual_uncomp_sizes[i]); inflate_out[i] = { static_cast<uint8_t*>(decomp_block_data.data()) + meta.block_list[i].offset, meta.block_list[i].size}; } } else { break; } } } return decomp_block_data; } else if (meta.codec == "snappy") { size_t const num_blocks = meta.block_list.size(); // comp_block_data contains contents of the avro file starting from the first block, excluding // file header. meta.block_list[i].offset refers to offset of block i in the file, including // file header. // Find ptrs to each compressed block in comp_block_data by removing header offset. 
cudf::detail::hostdevice_vector<void const*> compressed_data_ptrs(num_blocks, stream); std::transform(meta.block_list.begin(), meta.block_list.end(), compressed_data_ptrs.host_ptr(), [&](auto const& block) { return static_cast<std::byte const*>(comp_block_data.data()) + (block.offset - meta.block_list[0].offset); }); compressed_data_ptrs.host_to_device_async(stream); cudf::detail::hostdevice_vector<size_t> compressed_data_sizes(num_blocks, stream); std::transform(meta.block_list.begin(), meta.block_list.end(), compressed_data_sizes.host_ptr(), [](auto const& block) { return block.size; }); compressed_data_sizes.host_to_device_async(stream); cudf::detail::hostdevice_vector<size_t> uncompressed_data_sizes(num_blocks, stream); nvcompStatus_t status = nvcompBatchedSnappyGetDecompressSizeAsync(compressed_data_ptrs.device_ptr(), compressed_data_sizes.device_ptr(), uncompressed_data_sizes.device_ptr(), num_blocks, stream.value()); CUDF_EXPECTS(status == nvcompStatus_t::nvcompSuccess, "Unable to get uncompressed sizes for snappy compressed blocks"); uncompressed_data_sizes.device_to_host_sync(stream); size_t const uncompressed_data_size = std::reduce(uncompressed_data_sizes.begin(), uncompressed_data_sizes.end()); size_t const max_uncomp_block_size = std::reduce( uncompressed_data_sizes.begin(), uncompressed_data_sizes.end(), 0, thrust::maximum<size_t>()); size_t temp_size; status = nvcompBatchedSnappyDecompressGetTempSize(num_blocks, max_uncomp_block_size, &temp_size); CUDF_EXPECTS(status == nvcompStatus_t::nvcompSuccess, "Unable to get scratch size for snappy decompression"); rmm::device_buffer scratch(temp_size, stream); rmm::device_buffer decomp_block_data(uncompressed_data_size, stream); rmm::device_uvector<void*> uncompressed_data_ptrs(num_blocks, stream); cudf::detail::hostdevice_vector<size_t> uncompressed_data_offsets(num_blocks, stream); std::exclusive_scan(uncompressed_data_sizes.begin(), uncompressed_data_sizes.end(), uncompressed_data_offsets.begin(), 0); uncompressed_data_offsets.host_to_device_async(stream); thrust::tabulate(rmm::exec_policy(stream), uncompressed_data_ptrs.begin(), uncompressed_data_ptrs.end(), [off = uncompressed_data_offsets.device_ptr(), data = static_cast<std::byte*>(decomp_block_data.data())] __device__(int i) { return data + off[i]; }); rmm::device_uvector<size_t> actual_uncompressed_data_sizes(num_blocks, stream); rmm::device_uvector<nvcompStatus_t> statuses(num_blocks, stream); status = nvcompBatchedSnappyDecompressAsync(compressed_data_ptrs.device_ptr(), compressed_data_sizes.device_ptr(), uncompressed_data_sizes.device_ptr(), actual_uncompressed_data_sizes.data(), num_blocks, scratch.data(), scratch.size(), uncompressed_data_ptrs.data(), statuses.data(), stream); CUDF_EXPECTS(status == nvcompStatus_t::nvcompSuccess, "unable to perform snappy decompression"); CUDF_EXPECTS(thrust::equal(rmm::exec_policy(stream), uncompressed_data_sizes.d_begin(), uncompressed_data_sizes.d_end(), actual_uncompressed_data_sizes.begin()), "Mismatch in expected and actual decompressed size during snappy decompression"); CUDF_EXPECTS(thrust::equal(rmm::exec_policy(stream), statuses.begin(), statuses.end(), thrust::make_constant_iterator(nvcompStatus_t::nvcompSuccess)), "Error during snappy decompression"); // Update blocks offsets & sizes to refer to uncompressed data for (size_t i = 0; i < num_blocks; i++) { meta.block_list[i].offset = uncompressed_data_offsets[i]; meta.block_list[i].size = uncompressed_data_sizes[i]; } return decomp_block_data; } else { CUDF_FAIL("Unsupported 
compression codec\n"); } } std::vector<column_buffer> decode_data(metadata& meta, rmm::device_buffer const& block_data, std::vector<std::pair<uint32_t, uint32_t>> const& dict, device_span<string_index_pair const> global_dictionary, size_t num_rows, std::vector<std::pair<int, std::string>> const& selection, std::vector<data_type> const& column_types, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto out_buffers = std::vector<column_buffer>(); for (size_t i = 0; i < column_types.size(); ++i) { auto col_idx = selection[i].first; bool is_nullable = (meta.columns[col_idx].schema_null_idx >= 0); out_buffers.emplace_back(column_types[i], num_rows, is_nullable, stream, mr); } // Build gpu schema auto schema_desc = cudf::detail::hostdevice_vector<gpu::schemadesc_s>(meta.schema.size(), stream); uint32_t min_row_data_size = 0; int skip_field_cnt = 0; for (size_t i = 0; i < meta.schema.size(); i++) { type_kind_e kind = meta.schema[i].kind; logicaltype_kind_e logical_kind = meta.schema[i].logical_kind; if (skip_field_cnt != 0) { // Exclude union and array members from min_row_data_size skip_field_cnt += meta.schema[i].num_children - 1; } else { switch (kind) { case type_union: case type_array: skip_field_cnt = meta.schema[i].num_children; // fall through case type_boolean: case type_int: case type_long: case type_bytes: case type_string: case type_enum: min_row_data_size += 1; break; case type_float: min_row_data_size += 4; break; case type_double: min_row_data_size += 8; break; default: break; } } if (kind == type_enum && !meta.schema[i].symbols.size()) { kind = type_int; } schema_desc[i].kind = kind; schema_desc[i].logical_kind = logical_kind; schema_desc[i].count = (kind == type_enum) ? 0 : static_cast<uint32_t>(meta.schema[i].num_children); schema_desc[i].dataptr = nullptr; CUDF_EXPECTS(kind != type_union || meta.schema[i].num_children < 2 || (meta.schema[i].num_children == 2 && (meta.schema[i + 1].kind == type_null || meta.schema[i + 2].kind == type_null)), "Union with non-null type not currently supported"); } std::vector<void*> valid_alias(out_buffers.size(), nullptr); for (size_t i = 0; i < out_buffers.size(); i++) { auto const col_idx = selection[i].first; int schema_data_idx = meta.columns[col_idx].schema_data_idx; int schema_null_idx = meta.columns[col_idx].schema_null_idx; schema_desc[schema_data_idx].dataptr = out_buffers[i].data(); if (schema_null_idx >= 0) { if (!schema_desc[schema_null_idx].dataptr) { schema_desc[schema_null_idx].dataptr = out_buffers[i].null_mask(); } else { valid_alias[i] = schema_desc[schema_null_idx].dataptr; } } if (meta.schema[schema_data_idx].kind == type_enum) { schema_desc[schema_data_idx].count = dict[i].first; } if (out_buffers[i].null_mask_size()) { cudf::detail::set_null_mask(out_buffers[i].null_mask(), 0, num_rows, true, stream); } } auto block_list = cudf::detail::make_device_uvector_async( meta.block_list, stream, rmm::mr::get_current_device_resource()); schema_desc.host_to_device_async(stream); gpu::DecodeAvroColumnData(block_list, schema_desc.device_ptr(), global_dictionary, static_cast<uint8_t const*>(block_data.data()), static_cast<uint32_t>(schema_desc.size()), min_row_data_size, stream); // Copy valid bits that are shared between columns for (size_t i = 0; i < out_buffers.size(); i++) { if (valid_alias[i] != nullptr) { CUDF_CUDA_TRY(cudaMemcpyAsync(out_buffers[i].null_mask(), valid_alias[i], out_buffers[i].null_mask_size(), cudaMemcpyDefault, stream.value())); } } schema_desc.device_to_host_sync(stream); for (size_t i = 0; i 
< out_buffers.size(); i++) { auto const col_idx = selection[i].first; auto const schema_null_idx = meta.columns[col_idx].schema_null_idx; out_buffers[i].null_count() = (schema_null_idx >= 0) ? schema_desc[schema_null_idx].count : 0; } return out_buffers; } table_with_metadata read_avro(std::unique_ptr<cudf::io::datasource>&& source, avro_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto skip_rows = options.get_skip_rows(); auto num_rows = options.get_num_rows(); std::vector<std::unique_ptr<column>> out_columns; table_metadata metadata_out; // Open the source Avro dataset metadata auto meta = metadata(source.get()); // Select and read partial metadata / schema within the subset of rows meta.init_and_select_rows(skip_rows, num_rows); // Select only columns required by the options auto selected_columns = meta.select_columns(options.get_columns()); if (not selected_columns.empty()) { // Get a list of column data types std::vector<data_type> column_types; for (auto const& col : selected_columns) { auto& col_schema = meta.schema[meta.columns[col.first].schema_data_idx]; auto col_type = to_type_id(&col_schema); CUDF_EXPECTS(col_type != type_id::EMPTY, "Unknown type"); column_types.emplace_back(col_type); } if (meta.num_rows > 0) { rmm::device_buffer block_data; if (source->is_device_read_preferred(meta.selected_data_size)) { block_data = rmm::device_buffer{meta.selected_data_size, stream}; auto read_bytes = source->device_read(meta.block_list[0].offset, meta.selected_data_size, static_cast<uint8_t*>(block_data.data()), stream); block_data.resize(read_bytes, stream); } else { auto const buffer = source->host_read(meta.block_list[0].offset, meta.selected_data_size); block_data = rmm::device_buffer{buffer->data(), buffer->size(), stream}; } if (meta.codec != "" && meta.codec != "null") { auto decomp_block_data = decompress_data(*source, meta, block_data, stream); block_data = std::move(decomp_block_data); } else { auto dst_ofs = meta.block_list[0].offset; for (size_t i = 0; i < meta.block_list.size(); i++) { meta.block_list[i].offset -= dst_ofs; } } size_t total_dictionary_entries = 0; size_t dictionary_data_size = 0; auto dict = std::vector<std::pair<uint32_t, uint32_t>>(column_types.size()); for (size_t i = 0; i < column_types.size(); ++i) { auto col_idx = selected_columns[i].first; auto& col_schema = meta.schema[meta.columns[col_idx].schema_data_idx]; dict[i].first = static_cast<uint32_t>(total_dictionary_entries); dict[i].second = static_cast<uint32_t>(col_schema.symbols.size()); total_dictionary_entries += dict[i].second; for (auto const& sym : col_schema.symbols) { dictionary_data_size += sym.length(); } } auto d_global_dict = rmm::device_uvector<string_index_pair>(0, stream); auto d_global_dict_data = rmm::device_uvector<char>(0, stream); if (total_dictionary_entries > 0) { auto h_global_dict = std::vector<string_index_pair>(total_dictionary_entries); auto h_global_dict_data = std::vector<char>(dictionary_data_size); size_t dict_pos = 0; for (size_t i = 0; i < column_types.size(); ++i) { auto const col_idx = selected_columns[i].first; auto const& col_schema = meta.schema[meta.columns[col_idx].schema_data_idx]; auto const col_dict_entries = &(h_global_dict[dict[i].first]); for (size_t j = 0; j < dict[i].second; j++) { auto const& symbols = col_schema.symbols[j]; auto const data_dst = h_global_dict_data.data() + dict_pos; auto const len = symbols.length(); col_dict_entries[j].first = data_dst; col_dict_entries[j].second = len; 
std::copy(symbols.c_str(), symbols.c_str() + len, data_dst); dict_pos += len; } } d_global_dict = cudf::detail::make_device_uvector_async( h_global_dict, stream, rmm::mr::get_current_device_resource()); d_global_dict_data = cudf::detail::make_device_uvector_async( h_global_dict_data, stream, rmm::mr::get_current_device_resource()); stream.synchronize(); } auto out_buffers = decode_data(meta, block_data, dict, d_global_dict, num_rows, selected_columns, column_types, stream, mr); for (size_t i = 0; i < column_types.size(); ++i) { out_columns.emplace_back(make_column(out_buffers[i], nullptr, std::nullopt, stream)); } } else { // Create empty columns for (size_t i = 0; i < column_types.size(); ++i) { out_columns.emplace_back(make_empty_column(column_types[i])); } } } // Return column names metadata_out.schema_info.reserve(selected_columns.size()); std::transform(selected_columns.cbegin(), selected_columns.cend(), std::back_inserter(metadata_out.schema_info), [](auto const& c) { return column_name_info{c.second}; }); // Return user metadata metadata_out.user_data = meta.user_data; metadata_out.per_file_user_data = {{meta.user_data.begin(), meta.user_data.end()}}; return {std::make_unique<table>(std::move(out_columns)), std::move(metadata_out)}; } } // namespace avro } // namespace detail } // namespace io } // namespace cudf
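// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the reader sources): the decode path
// above is normally reached through cudf's public `read_avro` entry point.
// The file name "example.avro" is hypothetical; error handling is omitted.
// ---------------------------------------------------------------------------
#include <cudf/io/avro.hpp>

#include <iostream>

int main()
{
  // Describe the source; skip_rows/num_rows/columns may optionally restrict what is read
  auto options =
    cudf::io::avro_reader_options::builder(cudf::io::source_info{"example.avro"})
      .skip_rows(0)  // optional; defaults shown for clarity
      .build();

  // Decodes the selected blocks on the GPU and returns the table plus column-name metadata
  auto result = cudf::io::read_avro(options);

  std::cout << "columns: " << result.tbl->num_columns()
            << ", rows: " << result.tbl->num_rows() << "\n";
  return 0;
}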
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/avro.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "avro_common.hpp" #include <algorithm> #include <cstddef> #include <cstdint> #include <cstdio> #include <cstring> #include <map> #include <string> #include <vector> namespace cudf { namespace io { namespace avro { /** * @brief AVRO schema entry */ struct schema_entry { explicit schema_entry(type_kind_e kind_, int32_t parent_idx_ = -1, int32_t num_children_ = 0) : parent_idx(parent_idx_), num_children(num_children_), kind(kind_) { } int32_t parent_idx = -1; // index of parent entry in schema array, negative if no parent int32_t num_children = 0; type_kind_e kind = type_not_set; logicaltype_kind_e logical_kind = logicaltype_not_set; std::string name = ""; std::vector<std::string> symbols; }; /** * @brief AVRO output column */ struct column_desc { int32_t schema_data_idx = -1; // schema index of data column int32_t schema_null_idx = -1; // schema index of corresponding null object int32_t parent_union_idx = -1; // index of this column in parent union (-1 if not a union member) std::string name = ""; }; /** * @brief AVRO file metadata struct * * `metadata_size` is the size in bytes of the avro file header. * * `total_data_size` is the size of all data minus `metadata_size`. * * `selected_data_size` is the size of all data minus `metadata_size`, with any * adjustments made to account for the number of rows or rows to skip per the * user's request. This is the value used to size device-side buffers. * * `num_rows` is the number of rows that will be processed. If the user has not * requested the number of rows to be limited (i.e. via the `num_rows` param to * `read_avro()`), this number will represent all rows in the file *after* the * `skip_rows` parameter has been taken into consideration (assuming a request * has been made to also skip rows). * * `total_num_rows` is the total number of rows present in the file, across all * blocks. This may be more than `num_rows` if the user has requested a limit * on the number of rows to return, or if `skip_rows` is active. * * `skip_rows` is the number of rows the user has requested to skip. Note that * this value may differ from the `block_desc_s.first_row` member, which will * capture the number of rows to skip for a given block. * * `block_list` is a list of all blocks that contain the selected rows. If no * row filtering has been done via `num_rows` or `skip_rows`, it will contain * all blocks. Otherwise, it will contain only blocks selected by those * constraints. * * N.B. It is important to note that the coordination of skipping and limiting * rows is dictated by the `first_row` and `num_rows` members of each block * in the block list, *not* the `skip_rows` and `num_rows` members of this * struct. * * This is because the first row and number of rows to process for each * block need to be handled at the individual block level in order to * correctly support avro multi-block files. * * See also the `block_desc_s` struct. 
*/ struct file_metadata { std::map<std::string, std::string> user_data; std::string codec = ""; uint64_t sync_marker[2] = {0, 0}; size_t metadata_size = 0; size_t total_data_size = 0; size_t selected_data_size = 0; size_type num_rows = 0; size_type skip_rows = 0; size_type total_num_rows = 0; uint32_t max_block_size = 0; std::vector<schema_entry> schema; std::vector<block_desc_s> block_list; std::vector<column_desc> columns; }; /** * @brief Extract AVRO schema from JSON string */ class schema_parser { protected: enum { MAX_SCHEMA_DEPTH = 32 }; public: schema_parser() {} bool parse(std::vector<schema_entry>& schema, std::string const& str); protected: [[nodiscard]] bool more_data() const { return (m_cur < m_end); } std::string get_str(); protected: char const* m_base; char const* m_cur; char const* m_end; }; /** * @brief AVRO file container parsing class */ class container { public: container(uint8_t const* base, size_t len) noexcept : m_base{base}, m_start{base}, m_cur{base}, m_end{base + len} { } [[nodiscard]] auto bytecount() const { return m_cur - m_start; } template <typename T> T get_raw() { if (m_cur + sizeof(T) > m_end) return T{}; T val; memcpy(&val, m_cur, sizeof(T)); m_cur += sizeof(T); return val; } template <typename T> T get_encoded(); public: bool parse(file_metadata* md, size_t max_num_rows = 0x7fff'ffff, size_t first_row = 0); protected: // Base address of the file data. This will always point to the file's metadata. uint8_t const* m_base; // Start, current, and end pointers for the file. These pointers refer to the // actual data content of the file, not the metadata. `m_cur` and `m_start` // will only ever differ if a user has requested `read_avro()` to skip rows; // in this case, `m_start` will be the base address of the block that contains // the first row to be processed. `m_cur` is updated as the file is parsed, // until either `m_end` is reached, or the number of rows requested by the user // is reached. uint8_t const* m_start; uint8_t const* m_cur; uint8_t const* m_end; }; } // namespace avro } // namespace io } // namespace cudf
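// ---------------------------------------------------------------------------
// Sketch only (the real `get_encoded` specializations live in the .cpp file):
// Avro serializes int/long values as zig-zag encoded base-128 varints, which
// is the decoding a `get_encoded<int64_t>` has to perform, under the same
// pointer-bounds conventions as `get_raw`. The helper name is hypothetical.
// ---------------------------------------------------------------------------
#include <cstdint>

inline int64_t decode_zigzag_varint(uint8_t const*& cur, uint8_t const* end)
{
  uint64_t u = 0;
  for (uint32_t shift = 0; cur < end && shift < 64; shift += 7) {
    uint64_t const byte = *cur++;
    u |= (byte & 0x7f) << shift;        // accumulate 7 payload bits per byte
    if ((byte & 0x80) == 0) { break; }  // continuation bit clear: last byte
  }
  // Undo the zig-zag mapping: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...
  return static_cast<int64_t>(u >> 1) ^ -static_cast<int64_t>(u & 1);
}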
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/avro/avro_gpu.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "avro_common.hpp" #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> namespace cudf { namespace io { namespace avro { namespace gpu { /** * @brief Struct to describe the avro schema */ struct schemadesc_s { cudf::io::avro::type_kind_e kind; // avro type kind cudf::io::avro::logicaltype_kind_e logical_kind; // avro logicaltype kind uint32_t count; // for records/unions: number of following child columns, for nulls: global // null_count, for enums: dictionary ofs void* dataptr; // Ptr to column data, or null if column not selected }; /** * @brief Launches kernel for decoding column data * * @param[in] blocks Data block descriptions * @param[in] schema Schema description * @param[in] global_dictionary Global dictionary entries * @param[in] avro_data Raw block data * @param[in] schema_len Number of entries in schema * @param[in] min_row_size Minimum size in bytes of a row * @param[in] stream CUDA stream to use */ void DecodeAvroColumnData(cudf::device_span<block_desc_s const> blocks, schemadesc_s* schema, cudf::device_span<string_index_pair const> global_dictionary, uint8_t const* avro_data, uint32_t schema_len, uint32_t min_row_size, rmm::cuda_stream_view stream); } // namespace gpu } // namespace avro } // namespace io } // namespace cudf
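// ---------------------------------------------------------------------------
// Host-side wiring sketch (hypothetical helper, not part of this header):
// describing a single non-nullable Avro `long` column to DecodeAvroColumnData.
// `blocks`, `raw_block_data`, and `out_data` are assumed to have been parsed,
// uploaded, and allocated elsewhere (see the reader implementation).
// ---------------------------------------------------------------------------
#include <cudf/utilities/error.hpp>

#include <rmm/device_uvector.hpp>

namespace cudf::io::avro {

void decode_single_long_column(cudf::device_span<block_desc_s const> blocks,
                               uint8_t const* raw_block_data,
                               int64_t* out_data,
                               rmm::cuda_stream_view stream)
{
  gpu::schemadesc_s desc{};
  desc.kind         = type_long;            // zig-zag varint encoded 64-bit values
  desc.logical_kind = logicaltype_not_set;  // plain long, no logical type
  desc.count        = 0;                    // no children; not an enum
  desc.dataptr      = out_data;             // output buffer for the decoded column

  rmm::device_uvector<gpu::schemadesc_s> d_schema(1, stream);
  CUDF_CUDA_TRY(cudaMemcpyAsync(
    d_schema.data(), &desc, sizeof(desc), cudaMemcpyDefault, stream.value()));

  gpu::DecodeAvroColumnData(blocks,
                            d_schema.data(),
                            device_span<string_index_pair const>{},  // no dictionary
                            raw_block_data,
                            1,  // schema_len: one schema entry
                            1,  // min_row_size: the reader counts 1 byte per long
                            stream);
}

}  // namespace cudf::io::avro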
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/logical_stack.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/types.hpp> #include <cudf/utilities/default_stream.hpp> #include <cudf/utilities/error.hpp> #include <cudf/utilities/span.hpp> #include <cudf_test/print_utilities.cuh> #include <rmm/device_buffer.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/device_ptr.h> #include <thrust/execution_policy.h> #include <thrust/fill.h> #include <thrust/iterator/transform_output_iterator.h> #include <thrust/scatter.h> #include <cub/cub.cuh> #include <algorithm> #include <cstdint> #include <type_traits> namespace cudf::io::fst { /** * @brief Describes the kind of stack operation. */ enum class stack_op_type : int8_t { READ = 0, ///< Operation reading what is currently on top of the stack PUSH = 1, ///< Operation pushing a new item on top of the stack POP = 2, ///< Operation popping the item currently on top of the stack RESET = 3 ///< Operation popping all items currently on the stack }; /** * @brief Describes the kind of stack operations supported by the logical stack. */ enum class stack_op_support : bool { NO_RESET_SUPPORT = false, ///< A stack that only supports push(x) and pop() operations WITH_RESET_SUPPORT = true ///< A stack that supports push(x), pop(), and reset() operations }; namespace detail { /** * @brief A convenience struct that represents a stack operation as a pair, where the stack_level * represents the stack's level and the value represents the stack symbol. * * @tparam StackLevelT The stack level type sufficient to cover all stack levels. Must be signed * type as any subsequence of stack operations must be able to be covered. E.g., consider the first * 10 operations are all push and the last 10 operations are all pop operations, we need to be able * to represent a partial aggregate of the first ten items, which is '+10', just as well as a * partial aggregate of the last ten items, which is '-10'. * @tparam ValueT The value type that corresponds to the stack symbols (i.e., covers the stack * alphabet). */ template <typename StackLevelT, typename ValueT> struct StackOp { // Must be signed type as any subsequence of stack operations must be able to be covered. static_assert(std::is_signed_v<StackLevelT>, "StackLevelT has to be a signed type"); StackLevelT stack_level; ValueT value; }; /** * @brief Helper class to assist with radix sorting StackOp instances by stack level. * * @tparam BYTE_SIZE The size of the StackOp. */ template <std::size_t BYTE_SIZE> struct StackOpToUnsigned { using UnsignedT = void; }; template <> struct StackOpToUnsigned<2U> { using UnsignedT = uint16_t; }; template <> struct StackOpToUnsigned<4U> { using UnsignedT = uint32_t; }; template <> struct StackOpToUnsigned<8U> { using UnsignedT = uint64_t; }; /** * @brief Alias template to retrieve an unsigned bit-representation that can be used for radix * sorting the stack level of a StackOp. 
* * @tparam StackOpT The StackOp class template instance for which to get an unsigned * bit-representation */ template <typename StackOpT> using UnsignedStackOpType = typename StackOpToUnsigned<sizeof(StackOpT)>::UnsignedT; /** * @brief Function object class template used for converting a stack symbol to a stack * operation that has a stack level to which an operation applies. * * @tparam StackOpT * @tparam StackSymbolToStackOpTypeT */ template <typename StackOpT, typename StackSymbolToStackOpTypeT> struct StackSymbolToStackOp { template <typename StackSymbolT> constexpr CUDF_HOST_DEVICE StackOpT operator()(StackSymbolT const& stack_symbol) const { stack_op_type stack_op = symbol_to_stack_op_type(stack_symbol); // PUSH => +1, POP => -1, READ => 0 int32_t level_delta = (stack_op == stack_op_type::PUSH) ? 1 : (stack_op == stack_op_type::POP) ? -1 : 0; return StackOpT{static_cast<decltype(StackOpT::stack_level)>(level_delta), stack_symbol}; } /// Function object returning a stack operation type for a given stack symbol StackSymbolToStackOpTypeT symbol_to_stack_op_type; }; /** * @brief Function object that maps a stack `reset` operation to `1`. */ template <typename StackSymbolToStackOpTypeT> struct NewlineToResetStackSegmentOp { template <typename StackSymbolT> constexpr CUDF_HOST_DEVICE uint32_t operator()(StackSymbolT const& stack_symbol) const { stack_op_type stack_op = symbol_to_stack_op_type(stack_symbol); // Every reset operation marks the beginning of a new segment return (stack_op == stack_op_type::RESET) ? 1 : 0; } /// Function object returning a stack operation type for a given stack symbol StackSymbolToStackOpTypeT symbol_to_stack_op_type; }; /** * @brief Function object that wraps around for values that exceed the largest value of `TargetT` */ template <typename TargetT> struct ModToTargetTypeOpT { template <typename T> constexpr CUDF_HOST_DEVICE TargetT operator()(T const& val) const { return static_cast<TargetT>( val % (static_cast<T>(cuda::std::numeric_limits<TargetT>::max()) + static_cast<T>(1))); } }; /** * @brief Binary reduction operator to compute the absolute stack level from relative stack levels * (i.e., +1 for a PUSH, -1 for a POP operation). */ template <typename StackSymbolToStackOpTypeT> struct AddStackLevelFromStackOp { template <typename StackLevelT, typename ValueT> constexpr CUDF_HOST_DEVICE StackOp<StackLevelT, ValueT> operator()( StackOp<StackLevelT, ValueT> const& lhs, StackOp<StackLevelT, ValueT> const& rhs) const { StackLevelT new_level = lhs.stack_level + rhs.stack_level; return StackOp<StackLevelT, ValueT>{new_level, rhs.value}; } /// Function object returning a stack operation type for a given stack symbol StackSymbolToStackOpTypeT symbol_to_stack_op_type; }; /** * @brief Binary reduction operator that propagates a write operation for a specific stack level to * all reads of that same stack level. That is, if the stack level of LHS compares equal to the * stack level of the RHS and if the RHS is a read and the LHS is a write operation type, then we * return LHS, otherwise we return the RHS. 
*/ template <typename StackSymbolToStackOpTypeT> struct PopulatePopWithPush { template <typename StackLevelT, typename ValueT> constexpr CUDF_HOST_DEVICE StackOp<StackLevelT, ValueT> operator()( StackOp<StackLevelT, ValueT> const& lhs, StackOp<StackLevelT, ValueT> const& rhs) const { // If RHS is a read, then we need to figure out whether we can propagate the value from the LHS bool is_rhs_read = symbol_to_stack_op_type(rhs.value) != stack_op_type::PUSH; // Whether LHS is a matching write (i.e., the push operation that is on top of the stack for the // RHS's read) bool is_lhs_matching_write = (lhs.stack_level == rhs.stack_level) && symbol_to_stack_op_type(lhs.value) == stack_op_type::PUSH; return (is_rhs_read && is_lhs_matching_write) ? lhs : rhs; } /// Function object returning a stack operation type for a given stack symbol StackSymbolToStackOpTypeT symbol_to_stack_op_type; }; /** * @brief Binary reduction operator that is used to replace each read_symbol occurrence with the * last non-read_symbol that precedes such read_symbol. */ template <typename StackSymbolT> struct PropagateLastWrite { constexpr CUDF_HOST_DEVICE StackSymbolT operator()(StackSymbolT const& lhs, StackSymbolT const& rhs) const { // If RHS is a yet-to-be-propagated read, then we need to check whether we can use the LHS to fill it bool is_rhs_read = (rhs == read_symbol); // We propagate the write from the LHS if it's a write bool is_lhs_write = (lhs != read_symbol); return (is_rhs_read && is_lhs_write) ? lhs : rhs; } /// The read_symbol that is supposed to be replaced StackSymbolT read_symbol; }; /** * @brief Helper function object class to convert a StackOp to the stack symbol of that * StackOp. */ struct StackOpToStackSymbol { template <typename StackLevelT, typename ValueT> constexpr CUDF_HOST_DEVICE ValueT operator()(StackOp<StackLevelT, ValueT> const& kv_op) const { return kv_op.value; } }; /** * @brief Replaces all operations that apply to stack level '0' with the empty stack symbol */ template <typename StackOpT> struct RemapEmptyStack { constexpr CUDF_HOST_DEVICE StackOpT operator()(StackOpT const& kv_op) const { return kv_op.stack_level == 0 ? empty_stack_symbol : kv_op; } StackOpT empty_stack_symbol; }; } // namespace detail /** * @brief Takes a sparse representation of a sequence of stack operations that either push something * onto the stack or pop something from the stack and resolves the symbol that is on top of the * stack. * * @tparam SupportResetOperation Whether the logical stack also supports `reset` operations that * reset the stack to the empty stack * @tparam StackLevelT Signed integer type that must be sufficient to cover [-max_stack_level, * max_stack_level] for the given sequence of stack operations. Must be signed as it needs to cover * the stack level of any arbitrary subsequence of stack operations. 
* @tparam StackSymbolItT An input iterator type that provides the sequence of symbols that * represent stack operations * @tparam SymbolPositionT The index that this stack operation is supposed to apply to * @tparam StackSymbolToStackOpTypeT Function object class to transform items from StackSymbolItT to * stack_op_type * @tparam TopOfStackOutItT Output iterator type to which StackSymbolT are being assigned * @tparam StackSymbolT The internal type being used (usually corresponding to StackSymbolItT's * value_type) * * @param[in] d_symbols Sequence of symbols that represent stack operations. Memory may alias with * \p d_top_of_stack * @param[in,out] d_symbol_positions Sequence of symbol positions (for a sparse representation), * sequence must be ordered in ascending order. Note, the memory of this array is repurposed for * double-buffering. * @param[in] symbol_to_stack_op Function object that returns a stack operation type (push, pop, or * read) for a given symbol from \p d_symbols * @param[out] d_top_of_stack A random access output iterator that will be populated with * what-is-on-top-of-the-stack for the given sequence of stack operations \p d_symbols * @param[in] empty_stack_symbol The symbol that will be written to top_of_stack whenever the stack * was empty * @param[in] read_symbol A symbol that may not be confused for a symbol that would push to the * stack * @param[in] num_symbols_out The number of symbols that are supposed to be filled with * what-is-on-top-of-the-stack * @param[in] stream The cuda stream to which to dispatch the work */ template <stack_op_support SupportResetOperation, typename StackLevelT, typename StackSymbolItT, typename SymbolPositionT, typename StackSymbolToStackOpTypeT, typename TopOfStackOutItT, typename StackSymbolT> void sparse_stack_op_to_top_of_stack(StackSymbolItT d_symbols, device_span<SymbolPositionT> d_symbol_positions, StackSymbolToStackOpTypeT symbol_to_stack_op, TopOfStackOutItT d_top_of_stack, StackSymbolT const empty_stack_symbol, StackSymbolT const read_symbol, std::size_t const num_symbols_out, rmm::cuda_stream_view stream) { rmm::device_buffer temp_storage{}; // Type used to hold (stack_level, value) pairs using StackOpT = detail::StackOp<StackLevelT, StackSymbolT>; // Type used to mark *-by-key segments after `reset` operations using StackSegmentT = uint8_t; // The unsigned integer type that we use for radix sorting items of type StackOpT using StackOpUnsignedT = detail::UnsignedStackOpType<StackOpT>; static_assert(!std::is_void<StackOpUnsignedT>(), "unsupported StackOpT size"); // Transforming sequence of stack symbols to stack operations using StackSymbolToStackOpT = detail::StackSymbolToStackOp<StackOpT, StackSymbolToStackOpTypeT>; // TransformInputIterator converting stack symbols to stack operations using TransformInputItT = cub::TransformInputIterator<StackOpT, StackSymbolToStackOpT, StackSymbolItT>; constexpr bool supports_reset_op = SupportResetOperation == stack_op_support::WITH_RESET_SUPPORT; auto const num_symbols_in = d_symbol_positions.size(); // Converting a stack symbol that may either push or pop to a stack operation: // stack_symbol -> ([+1,0,-1], stack_symbol) StackSymbolToStackOpT stack_sym_to_kv_op{symbol_to_stack_op}; TransformInputItT stack_symbols_in(d_symbols, stack_sym_to_kv_op); // Double-buffer for sorting along the given sequence of symbol positions (the sparse 
// representation) cub::DoubleBuffer<SymbolPositionT> d_symbol_positions_db{nullptr, nullptr}; // Double-buffer for sorting the stack operations by the stack level to which such operation // applies cub::DoubleBuffer<StackOpT> d_kv_operations{nullptr, nullptr}; // A double-buffer that aliases memory from d_kv_operations with unsigned types in order to // be able to perform a radix sort cub::DoubleBuffer<StackOpUnsignedT> d_kv_operations_unsigned{nullptr, nullptr}; constexpr std::size_t bits_per_byte = 8; constexpr std::size_t begin_bit = offsetof(StackOpT, stack_level) * bits_per_byte; constexpr std::size_t end_bit = begin_bit + (sizeof(StackOpT::stack_level) * bits_per_byte); // The stack operation that makes sure that reads for stack level '0' will be populated // with the empty_stack_symbol StackOpT const empty_stack{0, empty_stack_symbol}; cub::TransformInputIterator<StackOpT, detail::RemapEmptyStack<StackOpT>, StackOpT*> kv_ops_scan_in(nullptr, detail::RemapEmptyStack<StackOpT>{empty_stack}); StackOpT* kv_ops_scan_out = nullptr; std::size_t stack_level_scan_bytes = 0; std::size_t stack_level_sort_bytes = 0; std::size_t match_level_scan_bytes = 0; std::size_t propagate_writes_scan_bytes = 0; // Getting temporary storage requirements for the prefix sum of the stack level after each // operation if constexpr (supports_reset_op) { // Iterator that returns `1` for every symbol that corresponds to a `reset` operation auto reset_segments_it = thrust::make_transform_iterator( d_symbols, detail::NewlineToResetStackSegmentOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}); auto const fake_key_segment_it = static_cast<StackSegmentT*>(nullptr); std::size_t gen_segments_scan_bytes = 0; std::size_t scan_by_key_bytes = 0; CUDF_CUDA_TRY(cub::DeviceScan::InclusiveSum( nullptr, gen_segments_scan_bytes, reset_segments_it, thrust::make_transform_output_iterator(fake_key_segment_it, detail::ModToTargetTypeOpT<StackSegmentT>{}), num_symbols_in, stream)); CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScanByKey( nullptr, scan_by_key_bytes, fake_key_segment_it, stack_symbols_in, d_kv_operations.Current(), detail::AddStackLevelFromStackOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, cub::Equality{}, stream)); stack_level_scan_bytes = std::max(gen_segments_scan_bytes, scan_by_key_bytes); } else { CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScan( nullptr, stack_level_scan_bytes, stack_symbols_in, d_kv_operations.Current(), detail::AddStackLevelFromStackOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, stream)); } // Getting temporary storage requirements for the stable radix sort (sorting by stack level of the // operations) CUDF_CUDA_TRY(cub::DeviceRadixSort::SortPairs(nullptr, stack_level_sort_bytes, d_kv_operations_unsigned, d_symbol_positions_db, num_symbols_in, begin_bit, end_bit, stream)); // Getting temporary storage requirements for the scan to match pop operations with the latest // push of the same level CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScan( nullptr, match_level_scan_bytes, kv_ops_scan_in, kv_ops_scan_out, detail::PopulatePopWithPush<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, stream)); // Getting temporary storage requirements for the scan to propagate top-of-stack for spots that // didn't push or pop CUDF_CUDA_TRY( cub::DeviceScan::ExclusiveScan(nullptr, propagate_writes_scan_bytes, d_top_of_stack, d_top_of_stack, detail::PropagateLastWrite<StackSymbolT>{read_symbol}, empty_stack_symbol, num_symbols_out, stream)); // Scratch memory required by the 
algorithms auto total_temp_storage_bytes = std::max({stack_level_scan_bytes, stack_level_sort_bytes, match_level_scan_bytes, propagate_writes_scan_bytes}); if (temp_storage.size() < total_temp_storage_bytes) { temp_storage.resize(total_temp_storage_bytes, stream); } // Actual device buffer size, as we need to pass in an lvalue-ref to cub algorithms as // temp_storage_bytes total_temp_storage_bytes = temp_storage.size(); rmm::device_uvector<SymbolPositionT> d_symbol_position_alt{num_symbols_in, stream}; rmm::device_uvector<StackOpT> d_kv_ops_current{num_symbols_in, stream}; rmm::device_uvector<StackOpT> d_kv_ops_alt{num_symbols_in, stream}; //------------------------------------------------------------------------------ // ALGORITHM //------------------------------------------------------------------------------ // Initialize double-buffer for sorting the indexes of the sequence of sparse stack operations d_symbol_positions_db = cub::DoubleBuffer<SymbolPositionT>{d_symbol_positions.data(), d_symbol_position_alt.data()}; // Initialize double-buffer for sorting the indexes of the sequence of sparse stack operations d_kv_operations = cub::DoubleBuffer<StackOpT>{d_kv_ops_current.data(), d_kv_ops_alt.data()}; // Compute prefix sum of the stack level after each operation if constexpr (supports_reset_op) { // Iterator that returns `1` for every symbol that corresponds to a `reset` operation auto reset_segments_it = thrust::make_transform_iterator( d_symbols, detail::NewlineToResetStackSegmentOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}); rmm::device_uvector<StackSegmentT> key_segments{num_symbols_in, stream}; CUDF_CUDA_TRY(cub::DeviceScan::InclusiveSum( temp_storage.data(), total_temp_storage_bytes, reset_segments_it, thrust::make_transform_output_iterator(key_segments.data(), detail::ModToTargetTypeOpT<StackSegmentT>{}), num_symbols_in, stream)); CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScanByKey( temp_storage.data(), total_temp_storage_bytes, key_segments.data(), stack_symbols_in, d_kv_operations.Current(), detail::AddStackLevelFromStackOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, cub::Equality{}, stream)); } else { CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScan( temp_storage.data(), total_temp_storage_bytes, stack_symbols_in, d_kv_operations.Current(), detail::AddStackLevelFromStackOp<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, stream)); } // Stable radix sort, sorting by stack level of the operations d_kv_operations_unsigned = cub::DoubleBuffer<StackOpUnsignedT>{ reinterpret_cast<StackOpUnsignedT*>(d_kv_operations.Current()), reinterpret_cast<StackOpUnsignedT*>(d_kv_operations.Alternate())}; CUDF_CUDA_TRY(cub::DeviceRadixSort::SortPairs(temp_storage.data(), total_temp_storage_bytes, d_kv_operations_unsigned, d_symbol_positions_db, num_symbols_in, begin_bit, end_bit, stream)); // TransformInputIterator that remaps all operations on stack level 0 to the empty stack symbol kv_ops_scan_in = {reinterpret_cast<StackOpT*>(d_kv_operations_unsigned.Current()), detail::RemapEmptyStack<StackOpT>{empty_stack}}; kv_ops_scan_out = reinterpret_cast<StackOpT*>(d_kv_operations_unsigned.Alternate()); // Inclusive scan to match pop operations with the latest push operation of that level CUDF_CUDA_TRY(cub::DeviceScan::InclusiveScan( temp_storage.data(), total_temp_storage_bytes, kv_ops_scan_in, kv_ops_scan_out, detail::PopulatePopWithPush<StackSymbolToStackOpTypeT>{symbol_to_stack_op}, num_symbols_in, stream)); // Fill the output tape with read-symbol 
thrust::fill(rmm::exec_policy(stream), thrust::device_ptr<StackSymbolT>{d_top_of_stack}, thrust::device_ptr<StackSymbolT>{d_top_of_stack + num_symbols_out}, read_symbol); // Transform the stack operations to the stack symbol they represent cub::TransformInputIterator<StackSymbolT, detail::StackOpToStackSymbol, StackOpT*> kv_op_to_stack_sym_it(kv_ops_scan_out, detail::StackOpToStackSymbol{}); // Scatter the stack symbols to the output tape (spots that are not scattered to have been // pre-filled with the read-symbol) thrust::scatter(rmm::exec_policy(stream), kv_op_to_stack_sym_it, kv_op_to_stack_sym_it + num_symbols_in, d_symbol_positions_db.Current(), d_top_of_stack); // We perform an exclusive scan in order to fill the items at the very left that may // be reading the empty stack before there's the first push occurrence in the sequence. // Also, we're interested in the top-of-the-stack symbol before the operation was applied. CUDF_CUDA_TRY( cub::DeviceScan::ExclusiveScan(temp_storage.data(), total_temp_storage_bytes, d_top_of_stack, d_top_of_stack, detail::PropagateLastWrite<StackSymbolT>{read_symbol}, empty_stack_symbol, num_symbols_out, stream)); } } // namespace cudf::io::fst
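// ---------------------------------------------------------------------------
// Usage sketch (hypothetical functor and buffers, appended for illustration):
// resolving the symbol on top of a brace/bracket stack for a sparse sequence
// of '{', '}', '[', ']' operations, the kind of query the JSON path performs
// with this primitive.
// ---------------------------------------------------------------------------
namespace cudf::io::fst {

struct BracketToStackOp {
  constexpr CUDF_HOST_DEVICE stack_op_type operator()(char const symbol) const
  {
    if (symbol == '{' || symbol == '[') { return stack_op_type::PUSH; }
    if (symbol == '}' || symbol == ']') { return stack_op_type::POP; }
    return stack_op_type::READ;
  }
};

// d_stack_ops:    device pointer to the stack-operation symbols
// d_positions:    their positions within the original input, in ascending order
// d_top_of_stack: output tape with num_positions_out entries
inline void resolve_top_of_stack(char const* d_stack_ops,
                                 device_span<int32_t> d_positions,
                                 char* d_top_of_stack,
                                 std::size_t num_positions_out,
                                 rmm::cuda_stream_view stream)
{
  constexpr char root_symbol = '_';  // written wherever the stack is empty
  constexpr char read_symbol = 'x';  // sentinel; must never be pushed
  sparse_stack_op_to_top_of_stack<stack_op_support::NO_RESET_SUPPORT, int32_t>(
    d_stack_ops,
    d_positions,
    BracketToStackOp{},
    d_top_of_stack,
    root_symbol,
    read_symbol,
    num_positions_out,
    stream);
}

}  // namespace cudf::io::fst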
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/lookup_tables.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/types.hpp> #include <io/fst/device_dfa.cuh> #include <io/utilities/hostdevice_vector.hpp> #include <cub/cub.cuh> #include <cuda/std/iterator> #include <algorithm> #include <cstdint> #include <iterator> #include <vector> namespace cudf::io::fst::detail { /** * @brief Helper function object that delegates a lookup to a given lookup table without mapping any * of the given arguments. */ struct IdentityOp { template <typename LookUpTableT, typename... Args> __host__ __device__ __forceinline__ auto operator()(LookUpTableT const& lookup_table, Args&&... args) const { return lookup_table.lookup(std::forward<Args>(args)...); } }; /** * @brief Class template that can be plugged into the finite-state machine to look up the symbol * group index for a given symbol. Class template does not support multi-symbol lookups (i.e., no * look-ahead). The class uses shared memory for the lookups. * * @tparam SymbolT The symbol type being passed in to lookup the corresponding symbol group id * @tparam PreMapOpT A function object that is invoked with `(lut, symbol)` and must return the * symbol group index of `symbol`. `lut` is an instance of the lookup table and `symbol` is the * symbol for which to get the symbol group index. If no particular mapping is needed, an instance * of `IdentityOp` can be used. */ template <typename SymbolT, typename PreMapOpT> class SingleSymbolSmemLUT { private: // Type used for representing a symbol group id (i.e., what we return for a given symbol) using SymbolGroupIdT = uint8_t; // Number of entries for every lookup (e.g., for 8-bit Symbol this is 256) static constexpr uint32_t NUM_ENTRIES_PER_LUT = 0x01U << (sizeof(SymbolT) * 8U); struct _TempStorage { // sym_to_sgid[symbol] -> symbol group index SymbolGroupIdT sym_to_sgid[NUM_ENTRIES_PER_LUT]; }; public: using TempStorage = cub::Uninitialized<_TempStorage>; struct KernelParameter { using LookupTableT = SingleSymbolSmemLUT<SymbolT, PreMapOpT>; // sym_to_sgid[min(symbol,num_valid_entries)] -> symbol group index uint32_t num_valid_entries; // sym_to_sgid[symbol] -> symbol group index SymbolGroupIdT sym_to_sgid[NUM_ENTRIES_PER_LUT]; // Function object that transforms a symbol to a symbol group id PreMapOpT pre_map_op; }; /** * @brief Initializes the given \p sgid_init with the symbol group lookups defined by \p * symbol_strings. * * @param symbol_strings Array of strings, where the i-th string holds all symbols * (characters!) 
that correspond to the i-th symbol group index * @param pre_map_op Function object that maps a symbol before the symbol group lookup * @return The initialized kernel parameter holding the lookup table */ template <typename SymbolGroupItT> static KernelParameter InitDeviceSymbolGroupIdLut(SymbolGroupItT const& symbol_strings, PreMapOpT pre_map_op) { KernelParameter init_data{}; // The symbol group index to be returned if none of the given symbols match SymbolGroupIdT no_match_id = symbol_strings.size(); // The symbol with the largest value that is mapped to a symbol group id SymbolGroupIdT max_base_match_val = 0; // Initialize all entries: by default we return the no-match-id std::fill(&init_data.sym_to_sgid[0], &init_data.sym_to_sgid[NUM_ENTRIES_PER_LUT], no_match_id); // Set up lookup table uint32_t sg_id = 0; // Iterate over the symbol groups for (auto const& sg_symbols : symbol_strings) { // Iterate over all symbols that belong to the current symbol group for (auto const& sg_symbol : sg_symbols) { max_base_match_val = std::max(max_base_match_val, static_cast<SymbolGroupIdT>(sg_symbol)); init_data.sym_to_sgid[static_cast<int32_t>(sg_symbol)] = sg_id; } sg_id++; } // Initialize the out-of-bounds lookup: sym_to_sgid[max_base_match_val+1] -> no_match_id init_data.sym_to_sgid[max_base_match_val + 1] = no_match_id; // Alias memory / return memory requirements (+2 covers the out-of-bounds entry set above) init_data.num_valid_entries = max_base_match_val + 2; init_data.pre_map_op = pre_map_op; return init_data; } _TempStorage& temp_storage; SymbolGroupIdT num_valid_entries; PreMapOpT pre_map_op; __device__ __forceinline__ _TempStorage& PrivateStorage() { __shared__ _TempStorage private_storage; return private_storage; } /** * @brief Initializes the lookup table, primarily to be invoked from within device code but also * provides host-side implementation for verification. * @note Synchronizes the thread block, if called from device, and, hence, requires all threads * of the thread block to call the constructor */ constexpr CUDF_HOST_DEVICE SingleSymbolSmemLUT(KernelParameter const& kernel_param, TempStorage& temp_storage) : temp_storage(temp_storage.Alias()), num_valid_entries(kernel_param.num_valid_entries) { // GPU-side init #if CUB_PTX_ARCH > 0 for (int32_t i = threadIdx.x; i < kernel_param.num_valid_entries; i += blockDim.x) { this->temp_storage.sym_to_sgid[i] = kernel_param.sym_to_sgid[i]; } __syncthreads(); #else // CPU-side init std::copy_n(kernel_param.sym_to_sgid, kernel_param.num_valid_entries, this->temp_storage.sym_to_sgid); #endif } template <typename SymbolT_> constexpr CUDF_HOST_DEVICE int32_t operator()(SymbolT_ const symbol) const { // Look up the symbol group for given symbol return pre_map_op(*this, symbol); } constexpr CUDF_HOST_DEVICE int32_t lookup(SymbolT const symbol) const { // Look up the symbol group for given symbol return temp_storage .sym_to_sgid[min(static_cast<SymbolGroupIdT>(symbol), num_valid_entries - 1U)]; } }; /** * @brief A simple symbol group lookup wrapper that uses a simple function object to * retrieve the symbol group id for a symbol. 
* * @tparam SymbolGroupLookupOpT The function object type to return the symbol group for a given * symbol */ template <typename SymbolGroupLookupOpT> class SymbolGroupLookupOp { private: struct _TempStorage {}; public: using TempStorage = cub::Uninitialized<_TempStorage>; struct KernelParameter { // Declare the member type that the DFA is going to instantiate using LookupTableT = SymbolGroupLookupOp<SymbolGroupLookupOpT>; SymbolGroupLookupOpT sgid_lookup_op; }; static KernelParameter InitDeviceSymbolGroupIdLut(SymbolGroupLookupOpT sgid_lookup_op) { return KernelParameter{sgid_lookup_op}; } private: _TempStorage& temp_storage; SymbolGroupLookupOpT sgid_lookup_op; __device__ __forceinline__ _TempStorage& PrivateStorage() { __shared__ _TempStorage private_storage; return private_storage; } public: CUDF_HOST_DEVICE SymbolGroupLookupOp(KernelParameter const& kernel_param, TempStorage& temp_storage) : temp_storage(temp_storage.Alias()), sgid_lookup_op(kernel_param.sgid_lookup_op) { } template <typename SymbolT_> constexpr CUDF_HOST_DEVICE int32_t operator()(SymbolT_ const symbol) const { // Look up the symbol group for given symbol return sgid_lookup_op(symbol); } }; /** * @brief Prepares a simple symbol group lookup wrapper that uses a simple function object to * retrieve the symbol group id for a symbol. * * @tparam FunctorT A function object type that must implement the signature `int32_t * operator()(symbol)`, where `symbol` is a symbol from the input type. * @param sgid_lookup_op A function object that must implement the signature `int32_t * operator()(symbol)`, where `symbol` is a symbol from the input type. * @return The kernel parameter of type SymbolGroupLookupOp::KernelParameter that is used to * initialize a simple symbol group id lookup wrapper */ template <typename FunctorT> auto make_symbol_group_lookup_op(FunctorT sgid_lookup_op) { return SymbolGroupLookupOp<FunctorT>::InitDeviceSymbolGroupIdLut(sgid_lookup_op); } /** * @brief Creates a symbol group lookup table of type `SingleSymbolSmemLUT` that uses a two-staged * lookup approach. @p pre_map_op is a function object invoked with `(lut, symbol)` that must return * the symbol group id for the given `symbol`. `lut` is an instance of the lookup table * and `symbol` is a symbol from the input tape. Usually, @p pre_map_op first maps a symbol from * the input tape to an integral that is convertible to `symbol_t`. In a second stage, @p pre_map_op * uses `lut`'s `lookup(mapped_symbol)` that maps that integral to the symbol group id. * * @tparam symbol_t Must be an integral type * @tparam NUM_SYMBOL_GROUPS The number of symbol groups, excluding the catchall symbol group (aka * "other" symbol group) * @tparam pre_map_op_t A unary function object type that returns the symbol group id * @param symbol_strings An array of vectors, where all the symbols in the i-th vector are mapped to * the i-th symbol group * @param pre_map_op A unary function object type that returns the symbol group id for a symbol * @return A symbol group lookup table */ template <typename symbol_t, std::size_t NUM_SYMBOL_GROUPS, typename pre_map_op_t> auto make_symbol_group_lut( std::array<std::vector<symbol_t>, NUM_SYMBOL_GROUPS> const& symbol_strings, pre_map_op_t pre_map_op) { using lookup_table_t = SingleSymbolSmemLUT<symbol_t, pre_map_op_t>; return lookup_table_t::InitDeviceSymbolGroupIdLut(symbol_strings, pre_map_op); } /** * @brief Creates a symbol group lookup table of type `SingleSymbolSmemLUT` that uses a two-staged * lookup approach. 
@p pre_map_op is a function object invoked with `(lut, symbol)` that must return * the symbol group id for the given `symbol`. `lut` is an instance of the lookup table * and `symbol` is a symbol from the input tape. Usually, @p pre_map_op first maps a symbol from * the input tape to an integral that is convertible to `symbol_t`. In a second stage, @p pre_map_op * uses `lut`'s `lookup(mapped_symbol)` that maps that integral to the symbol group id. * * @tparam symbol_t The type returned by @p pre_map_op must be assignable to `char` * @tparam NUM_SYMBOL_GROUPS The number of symbol groups, excluding the catchall symbol group (aka * "other" symbol group) * @tparam pre_map_op_t A unary function object type that returns the symbol group id for a symbol * @param symbol_strings An array of strings, where all the characters in the i-th string are mapped * to the i-th symbol group * @param pre_map_op A unary function object type that returns the symbol group id for a symbol * @return A symbol group lookup table */ template <std::size_t NUM_SYMBOL_GROUPS, typename pre_map_op_t> auto make_symbol_group_lut(std::array<std::string, NUM_SYMBOL_GROUPS> const& symbol_strings, pre_map_op_t pre_map_op) { using symbol_t = char; using lookup_table_t = SingleSymbolSmemLUT<symbol_t, pre_map_op_t>; return lookup_table_t::InitDeviceSymbolGroupIdLut(symbol_strings, pre_map_op); } /** * @brief Creates a symbol group lookup table that maps a symbol to a symbol group id, requiring the * symbol type from the input tape to be assignable to `symbol_t` and `symbol_t` to be of integral * type. * * @tparam symbol_t The input tape's symbol type must be assignable to this type * @tparam NUM_SYMBOL_GROUPS The number of symbol groups, excluding the catchall symbol group (aka * "other" symbol group) * @param symbol_strings An array of vectors, where all the symbols in the i-th vector are mapped to * the i-th symbol group * @return A symbol group lookup table */ template <typename symbol_t, std::size_t NUM_SYMBOL_GROUPS> auto make_symbol_group_lut( std::array<std::vector<symbol_t>, NUM_SYMBOL_GROUPS> const& symbol_strings) { return make_symbol_group_lut(symbol_strings, IdentityOp{}); } /** * @brief Creates a symbol group lookup table that maps a symbol to a symbol group id, requiring the * symbol type from the input tape to be assignable to `symbol_t` and `symbol_t` to be of integral * type. * * @tparam symbol_t The input tape's symbol type must be assignable to this type * @tparam NUM_SYMBOL_GROUPS The number of symbol groups, excluding the catchall symbol group (aka * "other" symbol group) * @param symbol_strings An array of strings, where all the characters in the i-th string are mapped * to the i-th symbol group * @return A symbol group lookup table */ template <std::size_t NUM_SYMBOL_GROUPS> auto make_symbol_group_lut(std::array<std::string, NUM_SYMBOL_GROUPS> const& symbol_strings) { return make_symbol_group_lut(symbol_strings, IdentityOp{}); } /** * @brief Lookup table mapping (old_state, symbol_group_id) transitions to a new target state. The * class uses shared memory for the lookups. 
* * @tparam MAX_NUM_SYMBOLS The maximum number of symbols being output by a single state transition * @tparam MAX_NUM_STATES The maximum number of states that this lookup table shall support */ template <int32_t MAX_NUM_SYMBOLS, int32_t MAX_NUM_STATES> class TransitionTable { private: // Type used using ItemT = char; struct _TempStorage { ItemT transitions[MAX_NUM_STATES * MAX_NUM_SYMBOLS]; }; public: static constexpr int32_t NUM_STATES = MAX_NUM_STATES; using TempStorage = cub::Uninitialized<_TempStorage>; struct KernelParameter { using LookupTableT = TransitionTable<MAX_NUM_SYMBOLS, MAX_NUM_STATES>; ItemT transitions[MAX_NUM_STATES * MAX_NUM_SYMBOLS]; }; template <typename StateIdT> static KernelParameter InitDeviceTransitionTable( std::array<std::array<StateIdT, MAX_NUM_SYMBOLS>, MAX_NUM_STATES> const& translation_table) { KernelParameter init_data{}; // translation_table[state][symbol] -> new state for (std::size_t state = 0; state < translation_table.size(); ++state) { for (std::size_t symbol = 0; symbol < translation_table[state].size(); ++symbol) { CUDF_EXPECTS( static_cast<int64_t>(translation_table[state][symbol]) <= std::numeric_limits<ItemT>::max(), "Target state index value exceeds value representable by the transition table's type"); init_data.transitions[symbol * MAX_NUM_STATES + state] = static_cast<ItemT>(translation_table[state][symbol]); } } return init_data; } constexpr CUDF_HOST_DEVICE TransitionTable(KernelParameter const& kernel_param, TempStorage& temp_storage) : temp_storage(temp_storage.Alias()) { #if CUB_PTX_ARCH > 0 for (int i = threadIdx.x; i < MAX_NUM_STATES * MAX_NUM_SYMBOLS; i += blockDim.x) { this->temp_storage.transitions[i] = kernel_param.transitions[i]; } __syncthreads(); #else std::copy_n( kernel_param.transitions, MAX_NUM_STATES * MAX_NUM_SYMBOLS, this->temp_storage.transitions); #endif } /** * @brief Returns a random-access iterator to lookup all the state transitions for one specific * symbol from an arbitrary old_state, i.e., it[old_state] -> new_state. * * @param state_id The DFA's current state index from which we'll transition * @param match_id The symbol group id of the symbol that we just read in * @return */ template <typename StateIndexT, typename SymbolIndexT> constexpr CUDF_HOST_DEVICE int32_t operator()(StateIndexT const state_id, SymbolIndexT const match_id) const { return temp_storage.transitions[match_id * MAX_NUM_STATES + state_id]; } private: _TempStorage& temp_storage; __device__ __forceinline__ _TempStorage& PrivateStorage() { __shared__ _TempStorage private_storage; return private_storage; } }; /** * @brief Creates a transition table of type `TransitionTable` that maps `(state_id, match_id)` * pairs to the new target state for the given `(state_id, match_id)`-combination. 
* * @tparam StateIdT An integral type used to represent state indexes * @tparam MAX_NUM_SYMBOLS The maximum number of symbols being output by a single state transition * @tparam MAX_NUM_STATES The maximum number of states that this lookup table shall support * @param transition_table The transition table * @return A transition table of type `TransitionTable` */ template <typename StateIdT, std::size_t MAX_NUM_SYMBOLS, std::size_t MAX_NUM_STATES> auto make_transition_table( std::array<std::array<StateIdT, MAX_NUM_SYMBOLS>, MAX_NUM_STATES> const& transition_table) { using transition_table_t = TransitionTable<MAX_NUM_SYMBOLS, MAX_NUM_STATES>; return transition_table_t::InitDeviceTransitionTable(transition_table); } /** * @brief Compile-time reflection to check if `OpT` type has the `TempStorage` and * `KernelParameter` type members. */ template <typename OpT, typename = void> struct is_complex_op : std::false_type {}; template <typename OpT> struct is_complex_op<OpT, std::void_t<typename OpT::TempStorage, typename OpT::KernelParameter>> : std::true_type {}; /** * @brief The device view that is passed to the finite-state transducer algorithm. Each of the * lookup tables can either be a simple function object that defines the `operator()` required for * respective lookup table or a complex class. * * @tparam SymbolGroupIdLookupT * @tparam TransitionTableT * @tparam TranslationTableT * @tparam NUM_STATES */ template <typename SymbolGroupIdLookupT, typename TransitionTableT, typename TranslationTableT, int32_t NUM_STATES> class dfa_device_view { private: // Complex symbol group lookup operators need to declare a `TempStorage` and `KernelParameter` // type member that is passed during device-side initialization. using sgid_lut_init_t = std::conditional_t<is_complex_op<SymbolGroupIdLookupT>::value, typename SymbolGroupIdLookupT::KernelParameter, SymbolGroupIdLookupT>; // Complex transition table lookup operators need to declare a `TempStorage` and // `KernelParameter` type member that is passed during device-side initialization. using transition_table_init_t = std::conditional_t<is_complex_op<TransitionTableT>::value, typename TransitionTableT::KernelParameter, TransitionTableT>; // Complex translation table lookup operators need to declare a `TempStorage` and // `KernelParameter` type member that is passed during device-side initialization. 
using translation_table_init_t = std::conditional_t<is_complex_op<TranslationTableT>::value, typename TranslationTableT::KernelParameter, TranslationTableT>; public: // The maximum number of states supported by this DFA instance // This is a value queried by the DFA simulation algorithm static constexpr int32_t MAX_NUM_STATES = NUM_STATES; using SymbolGroupStorageT = std::conditional_t<is_complex_op<SymbolGroupIdLookupT>::value, typename SymbolGroupIdLookupT::TempStorage, typename cub::NullType>; using TransitionTableStorageT = std::conditional_t<is_complex_op<TransitionTableT>::value, typename TransitionTableT::TempStorage, typename cub::NullType>; using TranslationTableStorageT = std::conditional_t<is_complex_op<TranslationTableT>::value, typename TranslationTableT::TempStorage, typename cub::NullType>; __device__ auto InitSymbolGroupLUT(SymbolGroupStorageT& temp_storage) { return SymbolGroupIdLookupT(*d_sgid_lut_init, temp_storage); } __device__ auto InitTransitionTable(TransitionTableStorageT& temp_storage) { return TransitionTableT(*d_transition_table_init, temp_storage); } __device__ auto InitTranslationTable(TranslationTableStorageT& temp_storage) { return TranslationTableT(*d_translation_table_init, temp_storage); } dfa_device_view(sgid_lut_init_t const* d_sgid_lut_init, transition_table_init_t const* d_transition_table_init, translation_table_init_t const* d_translation_table_init) : d_sgid_lut_init(d_sgid_lut_init), d_transition_table_init(d_transition_table_init), d_translation_table_init(d_translation_table_init) { } private: sgid_lut_init_t const* d_sgid_lut_init; transition_table_init_t const* d_transition_table_init; translation_table_init_t const* d_translation_table_init; }; /** * @brief Lookup table mapping (old_state, symbol_group_id) transitions to a sequence of symbols * that the finite-state transducer is supposed to output for each transition. The class uses * shared memory for the lookups. * * @tparam OutSymbolT The symbol type being output * @tparam OutSymbolOffsetT Type sufficiently large to index into the lookup table of output * symbols * @tparam MAX_NUM_SYMBOLS The maximum number of symbols being output by a single state transition * @tparam MAX_NUM_STATES The maximum number of states that this lookup table shall support * @tparam MAX_TABLE_SIZE The maximum number of items in the lookup table of output symbols that * may be used. */ template <typename OutSymbolT, typename OutSymbolOffsetT, int32_t MAX_NUM_SYMBOLS, int32_t MAX_NUM_STATES, int32_t MAX_TABLE_SIZE = (MAX_NUM_SYMBOLS * MAX_NUM_STATES)> class TransducerLookupTable { private: struct _TempStorage { OutSymbolOffsetT out_offset[MAX_NUM_STATES * MAX_NUM_SYMBOLS + 1]; OutSymbolT out_symbols[MAX_TABLE_SIZE]; }; public: using TempStorage = cub::Uninitialized<_TempStorage>; struct KernelParameter { using LookupTableT = TransducerLookupTable<OutSymbolT, OutSymbolOffsetT, MAX_NUM_SYMBOLS, MAX_NUM_STATES, MAX_TABLE_SIZE>; OutSymbolOffsetT d_out_offsets[MAX_NUM_STATES * MAX_NUM_SYMBOLS + 1]; OutSymbolT d_out_symbols[MAX_TABLE_SIZE]; }; /** * @brief Initializes the lookup table, primarily to be invoked from within device code but also * provides host-side implementation for verification. 
* @note Synchronizes the thread block, if called from device, and, hence, requires all threads * of the thread block to call the constructor */ static KernelParameter InitDeviceTranslationTable( std::array<std::array<std::vector<OutSymbolT>, MAX_NUM_SYMBOLS>, MAX_NUM_STATES> const& translation_table) { KernelParameter init_data; std::vector<OutSymbolT> out_symbols; out_symbols.reserve(MAX_TABLE_SIZE); std::vector<OutSymbolOffsetT> out_symbol_offsets; out_symbol_offsets.reserve(MAX_NUM_STATES * MAX_NUM_SYMBOLS + 1); out_symbol_offsets.push_back(0); // Iterate over the states in the transition table for (auto const& state_trans : translation_table) { uint32_t num_added = 0; // Iterate over the symbols in the transition table for (auto const& symbol_out : state_trans) { // Insert the output symbols for this specific (state, symbol) transition out_symbols.insert(std::end(out_symbols), std::begin(symbol_out), std::end(symbol_out)); out_symbol_offsets.push_back(out_symbols.size()); num_added++; } // Replicate the last offset for omitted symbols of this state (to guarantee a proper lookup // yielding zero output symbols for them) if (MAX_NUM_SYMBOLS > num_added) { int32_t count = MAX_NUM_SYMBOLS - num_added; out_symbol_offsets.insert(std::end(out_symbol_offsets), count, out_symbol_offsets.back()); } } // Check whether runtime-provided table size exceeds the compile-time given max. table size CUDF_EXPECTS(out_symbols.size() <= MAX_TABLE_SIZE, "Unsupported translation table"); // Prepare host-side data to be copied and passed to the device std::copy( std::cbegin(out_symbol_offsets), std::cend(out_symbol_offsets), init_data.d_out_offsets); std::copy(std::cbegin(out_symbols), std::cend(out_symbols), init_data.d_out_symbols); return init_data; } private: _TempStorage& temp_storage; __device__ __forceinline__ _TempStorage& PrivateStorage() { __shared__ _TempStorage private_storage; return private_storage; } public: /** * @brief Initializes the lookup table, primarily to be invoked from within device code but also * provides host-side implementation for verification. 
* @note Synchronizes the thread block, if called from device, and, hence, requires all threads * of the thread block to call the constructor */ CUDF_HOST_DEVICE TransducerLookupTable(KernelParameter const& kernel_param, TempStorage& temp_storage) : temp_storage(temp_storage.Alias()) { constexpr uint32_t num_offsets = MAX_NUM_STATES * MAX_NUM_SYMBOLS + 1; #if CUB_PTX_ARCH > 0 for (int i = threadIdx.x; i < num_offsets; i += blockDim.x) { this->temp_storage.out_offset[i] = kernel_param.d_out_offsets[i]; } // Make sure all threads in the block can read out_offset[num_offsets - 1] from shared // memory __syncthreads(); for (int i = threadIdx.x; i < this->temp_storage.out_offset[num_offsets - 1]; i += blockDim.x) { this->temp_storage.out_symbols[i] = kernel_param.d_out_symbols[i]; } __syncthreads(); #else std::copy_n(kernel_param.d_out_offsets, num_offsets, this->temp_storage.out_offset); std::copy_n(kernel_param.d_out_symbols, this->temp_storage.out_offset[num_offsets - 1], this->temp_storage.out_symbols); #endif } template <typename StateIndexT, typename SymbolIndexT, typename RelativeOffsetT, typename SymbolT> constexpr CUDF_HOST_DEVICE auto operator()(StateIndexT const state_id, SymbolIndexT const match_id, RelativeOffsetT const relative_offset, SymbolT const /*read_symbol*/) const { auto offset = temp_storage.out_offset[state_id * MAX_NUM_SYMBOLS + match_id] + relative_offset; return temp_storage.out_symbols[offset]; } template <typename StateIndexT, typename SymbolIndexT, typename SymbolT> constexpr CUDF_HOST_DEVICE OutSymbolOffsetT operator()(StateIndexT const state_id, SymbolIndexT const match_id, SymbolT const /*read_symbol*/) const { return temp_storage.out_offset[state_id * MAX_NUM_SYMBOLS + match_id + 1] - temp_storage.out_offset[state_id * MAX_NUM_SYMBOLS + match_id]; } }; /** * @brief Creates a translation table that maps (old_state, symbol_group_id) transitions to a * sequence of symbols that the finite-state transducer is supposed to output for each transition. * * @tparam MAX_TABLE_SIZE The maximum number of items in the lookup table of output symbols that * may be used * @tparam OutSymbolT The symbol type being output * @tparam MAX_NUM_SYMBOLS The maximum number of symbols being output by a single state transition * @tparam MAX_NUM_STATES The maximum number of states that this lookup table shall support * @param translation_table The translation table * @return A translation table of type `TransducerLookupTable`. */ template <std::size_t MAX_TABLE_SIZE, typename OutSymbolT, std::size_t MAX_NUM_SYMBOLS, std::size_t MAX_NUM_STATES> auto make_translation_table(std::array<std::array<std::vector<OutSymbolT>, MAX_NUM_SYMBOLS>, MAX_NUM_STATES> const& translation_table) { using OutSymbolOffsetT = int32_t; using translation_table_t = TransducerLookupTable<OutSymbolT, OutSymbolOffsetT, MAX_NUM_SYMBOLS, MAX_NUM_STATES, MAX_TABLE_SIZE>; return translation_table_t::InitDeviceTranslationTable(translation_table); } template <typename TranslationOpT> class TranslationOp { private: struct _TempStorage {}; public: using TempStorage = cub::Uninitialized<_TempStorage>; struct KernelParameter { using LookupTableT = TranslationOp<TranslationOpT>; TranslationOpT translation_op; }; /** * @brief Initializes the lookup table, primarily to be invoked from within device code but also * provides host-side implementation for verification. 
* @note Synchronizes the thread block, if called from device, and, hence, requires all threads * of the thread block to call the constructor */ static KernelParameter InitDeviceTranslationTable(TranslationOpT translation_op) { return KernelParameter{translation_op}; } private: _TempStorage& temp_storage; TranslationOpT translation_op; __device__ __forceinline__ _TempStorage& PrivateStorage() { __shared__ _TempStorage private_storage; return private_storage; } public: CUDF_HOST_DEVICE TranslationOp(KernelParameter const& kernel_param, TempStorage& temp_storage) : temp_storage(temp_storage.Alias()), translation_op(kernel_param.translation_op) { } template <typename StateIndexT, typename SymbolIndexT, typename RelativeOffsetT, typename SymbolT> constexpr CUDF_HOST_DEVICE auto operator()(StateIndexT const state_id, SymbolIndexT const match_id, RelativeOffsetT const relative_offset, SymbolT const read_symbol) const { return translation_op(state_id, match_id, relative_offset, read_symbol); } template <typename StateIndexT, typename SymbolIndexT, typename SymbolT> constexpr CUDF_HOST_DEVICE auto operator()(StateIndexT const state_id, SymbolIndexT const match_id, SymbolT const read_symbol) const { return translation_op(state_id, match_id, read_symbol); } }; /** * @brief Creates a simple translation table that uses a simple function object to retrieve the * output symbols for a given transition * * @tparam FunctorT A function object type that must implement two signatures: (1) with `(state_id, * match_id, read_symbol)` and (2) with `(state_id, match_id, relative_offset, read_symbol)` * @param map_op A function object that must implement two signatures: (1) with `(state_id, * match_id, read_symbol)` and (2) with `(state_id, match_id, relative_offset, read_symbol)`. * Invocations of the first signature, (1), must return the number of symbols that are emitted for * the given transition. The second signature, (2), must return the i-th symbol to be emitted for * that transition, where `i` corresponds to `relative_offset` * @return A translation table of type `TranslationOp` */ template <typename FunctorT> auto make_translation_functor(FunctorT map_op) { return TranslationOp<FunctorT>::InitDeviceTranslationTable(map_op); } /** * @brief Helper class to facilitate the specification and instantiation of a DFA (i.e., the * transition table and its number of states, the mapping of symbols to symbol groups, and the * translation table that specifies which state transitions cause which output to be written).
* * @tparam SymbolGroupIdInitT Type of the object used to initialize the symbol group lookup table, * which maps symbols to the symbol groups amongst which the DFA differentiates, including the * wildcard symbol group (one dimension of the transition table) * @tparam TransitionTableInitT Type of the object used to initialize the transition table over the * states defined by the DFA (the other dimension of the transition table) * @tparam TranslationTableInitT Type of the object used to initialize the translation table that * specifies the output emitted for each state transition */ template <typename SymbolGroupIdInitT, typename TransitionTableInitT, typename TranslationTableInitT> class Dfa { static constexpr int32_t single_item = 1; public: auto get_device_view() { return dfa_device_view<typename SymbolGroupIdInitT::LookupTableT, typename TransitionTableInitT::LookupTableT, typename TranslationTableInitT::LookupTableT, TransitionTableInitT::LookupTableT::NUM_STATES>{ &init_data.d_begin()->sgid_lut_init, &init_data.d_begin()->transition_table_init, &init_data.d_begin()->translation_table_init}; } Dfa(SymbolGroupIdInitT const& sgid_lut_init, TransitionTableInitT const& transition_table_init, TranslationTableInitT const& translation_table_init, rmm::cuda_stream_view stream) : init_data{single_item, stream} { *init_data.host_ptr() = {sgid_lut_init, transition_table_init, translation_table_init}; init_data.host_to_device_async(stream); } /** * @brief Dispatches the finite-state transducer algorithm to the GPU. * * @tparam SymbolItT Random-access input iterator type to the symbols fed into the FST * @tparam TransducedOutItT Random-access output iterator to which the transduced output will be * written * @tparam TransducedIndexOutItT Random-access output iterator type to which the input symbols' * indexes are written. * @tparam TransducedCountOutItT A single-item output iterator type to which the total number of * output symbols is written * @tparam OffsetT A type large enough to index into both: (a) the input symbols and * (b) the output symbols * @param d_chars_it Random-access input iterator to the beginning of the sequence of input * symbols * @param num_chars The total number of input symbols to process * @param d_out_it Random-access output iterator to which the transduced output is * written * @param d_out_idx_it Random-access output iterator to which the index i is written * iff the i-th input symbol caused some output to be written * @param d_num_transduced_out_it A single-item output iterator type to which the total number * of output symbols is written * @param seed_state The DFA's starting state. For streaming DFAs this corresponds to the * "end-state" of the previous invocation of the algorithm. * @param stream CUDA stream to launch kernels within.
*/ template <typename SymbolItT, typename TransducedOutItT, typename TransducedIndexOutItT, typename TransducedCountOutItT, typename OffsetT> void Transduce(SymbolItT d_chars_it, OffsetT num_chars, TransducedOutItT d_out_it, TransducedIndexOutItT d_out_idx_it, TransducedCountOutItT d_num_transduced_out_it, uint32_t const seed_state, rmm::cuda_stream_view stream) { std::size_t temp_storage_bytes = 0; rmm::device_buffer temp_storage{}; DeviceTransduce(nullptr, temp_storage_bytes, this->get_device_view(), d_chars_it, num_chars, d_out_it, d_out_idx_it, d_num_transduced_out_it, seed_state, stream); if (temp_storage.size() < temp_storage_bytes) { temp_storage.resize(temp_storage_bytes, stream); } DeviceTransduce(temp_storage.data(), temp_storage_bytes, this->get_device_view(), d_chars_it, num_chars, d_out_it, d_out_idx_it, d_num_transduced_out_it, seed_state, stream); } private: struct host_device_data { SymbolGroupIdInitT sgid_lut_init; TransitionTableInitT transition_table_init; TranslationTableInitT translation_table_init; }; cudf::detail::hostdevice_vector<host_device_data> init_data{}; }; /** * @brief Creates a deterministic finite automaton (DFA) as specified by the triple of (symbol * group, transition, translation)-lookup tables to be used with the finite-state transducer * algorithm. * * @param sgid_lut_init Object used to initialize the symbol group lookup table * @param transition_table_init Object used to initialize the transition table * @param translation_table_init Object used to initialize the translation table * @param stream The stream used to allocate and initialize device-side memory that is used to * initialize the lookup tables * @return A DFA of type `Dfa`. */ template <typename SymbolGroupIdInitT, typename TransitionTableInitT, typename TranslationTableInitT> auto make_fst(SymbolGroupIdInitT const& sgid_lut_init, TransitionTableInitT const& transition_table_init, TranslationTableInitT const& translation_table_init, rmm::cuda_stream_view stream) { return Dfa<SymbolGroupIdInitT, TransitionTableInitT, TranslationTableInitT>( sgid_lut_init, transition_table_init, translation_table_init, stream); } } // namespace cudf::io::fst::detail
0
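A minimal, hedged usage sketch for the factories above: the translation table is built exactly as make_translation_table expects, while the symbol-group and transition-table factories (make_symbol_group_lut, make_transition_table) are assumed to be defined earlier in this header and are therefore only shown commented out.

#include <rmm/cuda_stream_view.hpp>

#include <array>
#include <cstddef>
#include <vector>

void translation_table_example(rmm::cuda_stream_view stream)
{
  using namespace cudf::io::fst::detail;

  constexpr std::size_t NUM_STATES  = 2;  // e.g., outside-quotes / inside-quotes
  constexpr std::size_t NUM_SYMBOLS = 2;  // symbol groups: {quote}, {everything else}

  // For each (state, symbol group) transition, the sequence of symbols the FST emits
  std::array<std::array<std::vector<char>, NUM_SYMBOLS>, NUM_STATES> out_table;
  out_table[0][0] = {};     // state 0 reads a quote: emit nothing
  out_table[0][1] = {'x'};  // state 0 reads anything else: emit a marker
  // state 1 rows stay empty: emit nothing while inside quotes

  // MAX_TABLE_SIZE (8) must be >= the total number of stored output symbols (1 here)
  auto translation = make_translation_table<8, char, NUM_SYMBOLS, NUM_STATES>(out_table);

  // With the assumed companion factories, the three tables compose into a DFA:
  // auto dfa = make_fst(make_symbol_group_lut(...), make_transition_table(...), translation, stream);
  (void)translation;
  (void)stream;
}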
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/dispatch_dfa.cuh
/* * Copyright (c) 2022, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "agent_dfa.cuh" #include "in_reg_array.cuh" #include <cub/cub.cuh> #include <cstdint> namespace cudf::io::fst::detail { /** * @brief The tuning policy comprising all the architecture-specific compile-time tuning parameters. * * @tparam _BLOCK_THREADS Number of threads per block * @tparam _ITEMS_PER_THREAD Number of symbols processed by each thread */ template <int32_t _BLOCK_THREADS, int32_t _ITEMS_PER_THREAD> struct AgentDFAPolicy { // The number of threads per block static constexpr int32_t BLOCK_THREADS = _BLOCK_THREADS; // The number of symbols processed by each thread static constexpr int32_t ITEMS_PER_THREAD = _ITEMS_PER_THREAD; }; /** * @brief The list of architecture-specific tuning policies; further per-architecture tuning is yet * TBD. */ struct DeviceFSMPolicy { //------------------------------------------------------------------------------ // Architecture-specific tuning policies //------------------------------------------------------------------------------ struct Policy900 : cub::ChainedPolicy<900, Policy900, Policy900> { enum { BLOCK_THREADS = 128, ITEMS_PER_THREAD = 32, }; using AgentDFAPolicy = AgentDFAPolicy<BLOCK_THREADS, ITEMS_PER_THREAD>; }; // Top-of-list of the tuning policy "chain" using MaxPolicy = Policy900; }; /** * @brief Kernel for initializing single-pass prefix scan tile states * * @param items_state The tile state * @param num_tiles The number of tiles to be initialized */ template <typename TileState> __global__ void initialization_pass_kernel(TileState items_state, uint32_t num_tiles) { items_state.InitializeStatus(num_tiles); } template <typename DfaT, typename SymbolItT, typename TransducedOutItT, typename TransducedIndexOutItT, typename TransducedCountOutItT, typename OffsetT> struct DispatchFSM : DeviceFSMPolicy { //------------------------------------------------------------------------------ // DEFAULT TYPES //------------------------------------------------------------------------------ using StateIndexT = uint32_t; using BlockOffsetT = uint32_t; //------------------------------------------------------------------------------ // DERIVED CONFIGS //------------------------------------------------------------------------------ // DFA-specific configs static constexpr int32_t MAX_NUM_STATES = DfaT::MAX_NUM_STATES; static constexpr int32_t MAX_NUM_SYMBOLS = DfaT::MAX_NUM_SYMBOLS; // Whether to use a single-pass prefix scan that does all in one pass static constexpr bool SINGLE_PASS_STV = false; // Whether this is a finite-state transducer static constexpr bool IS_FST = true; //------------------------------------------------------------------------------ // TYPEDEFS //------------------------------------------------------------------------------ using StateVectorCompositeOpT = VectorCompositeOp<MAX_NUM_STATES>; //------------------------------------------------------------------------------ // MEMBER VARS //------------------------------------------------------------------------------
void* d_temp_storage; size_t& temp_storage_bytes; DfaT dfa; StateIndexT seed_state; SymbolItT d_chars_in; OffsetT num_chars; TransducedOutItT transduced_out_it; TransducedIndexOutItT transduced_out_idx_it; TransducedCountOutItT d_num_transduced_out_it; cudaStream_t stream; int const ptx_version; //------------------------------------------------------------------------------ // CONSTRUCTOR //------------------------------------------------------------------------------ CUB_RUNTIME_FUNCTION __forceinline__ DispatchFSM(void* d_temp_storage, size_t& temp_storage_bytes, DfaT dfa, StateIndexT seed_state, SymbolItT d_chars_in, OffsetT num_chars, TransducedOutItT transduced_out_it, TransducedIndexOutItT transduced_out_idx_it, TransducedCountOutItT d_num_transduced_out_it, cudaStream_t stream, int ptx_version) : d_temp_storage(d_temp_storage), temp_storage_bytes(temp_storage_bytes), dfa(dfa), seed_state(seed_state), d_chars_in(d_chars_in), num_chars(num_chars), transduced_out_it(transduced_out_it), transduced_out_idx_it(transduced_out_idx_it), d_num_transduced_out_it(d_num_transduced_out_it), stream(stream), ptx_version(ptx_version) { } //------------------------------------------------------------------------------ // DISPATCH INTERFACE //------------------------------------------------------------------------------ CUB_RUNTIME_FUNCTION __forceinline__ static cudaError_t Dispatch( void* d_temp_storage, size_t& temp_storage_bytes, DfaT dfa, StateIndexT seed_state, SymbolItT d_chars_in, OffsetT num_chars, TransducedOutItT transduced_out_it, TransducedIndexOutItT transduced_out_idx_it, TransducedCountOutItT d_num_transduced_out_it, cudaStream_t stream) { using MaxPolicyT = DispatchFSM::MaxPolicy; cudaError_t error; // Get PTX version int ptx_version; error = cub::PtxVersion(ptx_version); if (error != cudaSuccess) return error; // Create dispatch functor DispatchFSM dispatch(d_temp_storage, temp_storage_bytes, dfa, seed_state, d_chars_in, num_chars, transduced_out_it, transduced_out_idx_it, d_num_transduced_out_it, stream, ptx_version); error = MaxPolicyT::Invoke(ptx_version, dispatch); return error; } //------------------------------------------------------------------------------ // DFA SIMULATION KERNEL INVOCATION //------------------------------------------------------------------------------ template <typename ActivePolicyT, typename DFASimulationKernelT, typename TileStateT, typename FstScanTileStateT, typename StateVectorT> CUB_RUNTIME_FUNCTION __forceinline__ cudaError_t InvokeDFASimulationKernel(DFASimulationKernelT dfa_kernel, int32_t sm_count, StateIndexT seed_state, StateVectorT* d_thread_state_transition, TileStateT tile_state, FstScanTileStateT fst_tile_state) { cudaError_t error = cudaSuccess; cub::KernelConfig dfa_simulation_config; using PolicyT = typename ActivePolicyT::AgentDFAPolicy; if (CubDebug(error = dfa_simulation_config.Init<PolicyT>(dfa_kernel))) return error; // Kernel invocation uint32_t grid_size = std::max( 1u, CUB_QUOTIENT_CEILING(num_chars, PolicyT::BLOCK_THREADS * PolicyT::ITEMS_PER_THREAD)); uint32_t block_threads = dfa_simulation_config.block_threads; dfa_kernel<<<grid_size, block_threads, 0, stream>>>(dfa, d_chars_in, num_chars, seed_state, d_thread_state_transition, tile_state, fst_tile_state, transduced_out_it, transduced_out_idx_it, d_num_transduced_out_it); // Check for errors if (CubDebug(error = cudaPeekAtLastError())) return error; return error; } /** * @brief Computes the state-transition vectors */ template <typename ActivePolicyT, typename TileStateT, 
typename FstScanTileStateT, typename StateVectorT> CUB_RUNTIME_FUNCTION __forceinline__ cudaError_t ComputeStateTransitionVector(uint32_t sm_count, TileStateT tile_state, FstScanTileStateT fst_tile_state, StateVectorT* d_thread_state_transition) { StateIndexT seed_state = 0; return InvokeDFASimulationKernel<ActivePolicyT>( SimulateDFAKernel<true, SINGLE_PASS_STV, DfaT, TileStateT, typename ActivePolicyT::AgentDFAPolicy, SymbolItT, OffsetT, StateVectorT, FstScanTileStateT, TransducedOutItT, TransducedIndexOutItT, TransducedCountOutItT>, sm_count, seed_state, d_thread_state_transition, tile_state, fst_tile_state); } /** * @brief Performs the actual DFA simulation. */ template <typename ActivePolicyT, typename TileStateT, typename FstScanTileStateT, typename StateVectorT> CUB_RUNTIME_FUNCTION __forceinline__ cudaError_t SimulateDFA(uint32_t sm_count, TileStateT tile_state, FstScanTileStateT fst_tile_state, StateIndexT seed_state, StateVectorT* d_thread_state_transition) { return InvokeDFASimulationKernel<ActivePolicyT>( SimulateDFAKernel<false, SINGLE_PASS_STV, DfaT, TileStateT, typename ActivePolicyT::AgentDFAPolicy, SymbolItT, OffsetT, StateVectorT, FstScanTileStateT, TransducedOutItT, TransducedIndexOutItT, TransducedCountOutItT>, sm_count, seed_state, d_thread_state_transition, tile_state, fst_tile_state); } //------------------------------------------------------------------------------ // POLICY INVOCATION //------------------------------------------------------------------------------ template <typename ActivePolicyT> CUB_RUNTIME_FUNCTION __forceinline__ cudaError_t Invoke() { cudaError_t error = cudaSuccess; // Get SM count int device_ordinal = -1; int sm_count = -1; // Get current device error = cudaGetDevice(&device_ordinal); if (error != cudaSuccess) return error; error = cudaDeviceGetAttribute(&sm_count, cudaDevAttrMultiProcessorCount, device_ordinal); if (error != cudaSuccess) return error; //------------------------------------------------------------------------------ // DERIVED TYPEDEFS //------------------------------------------------------------------------------ // Type used to represent state-transition vectors using StateVectorT = MultiFragmentInRegArray<MAX_NUM_STATES, MAX_NUM_STATES - 1>; // Scan tile state used for propagating composed state transition vectors using ScanTileStateT = typename cub::ScanTileState<StateVectorT>; // Scan tile state used for propagating transduced output offsets using FstScanTileStateT = typename cub::ScanTileState<OffsetT>; // STATE-TRANSITION IDENTITY VECTOR StateVectorT state_identity_vector; for (int32_t i = 0; i < MAX_NUM_STATES; ++i) { state_identity_vector.Set(i, i); } StateVectorCompositeOpT state_vector_scan_op; //------------------------------------------------------------------------------ // DERIVED CONFIGS //------------------------------------------------------------------------------ enum { BLOCK_THREADS = ActivePolicyT::BLOCK_THREADS, SYMBOLS_PER_THREAD = ActivePolicyT::ITEMS_PER_THREAD, NUM_SYMBOLS_PER_BLOCK = BLOCK_THREADS * SYMBOLS_PER_THREAD }; BlockOffsetT num_blocks = std::max(1u, CUB_QUOTIENT_CEILING(num_chars, NUM_SYMBOLS_PER_BLOCK)); size_t num_threads = num_blocks * BLOCK_THREADS; //------------------------------------------------------------------------------ // TEMPORARY MEMORY REQUIREMENTS //------------------------------------------------------------------------------ enum { MEM_STATE_VECTORS = 0, MEM_SCAN, MEM_SINGLE_PASS_STV, MEM_FST_OFFSET, NUM_ALLOCATIONS }; size_t allocation_sizes[NUM_ALLOCATIONS] = {0}; 
void* allocations[NUM_ALLOCATIONS] = {0}; size_t vector_scan_storage_bytes = 0; // [MEMORY REQUIREMENTS] STATE-TRANSITION SCAN cub::DeviceScan::ExclusiveScan(nullptr, vector_scan_storage_bytes, static_cast<StateVectorT*>(allocations[MEM_STATE_VECTORS]), static_cast<StateVectorT*>(allocations[MEM_STATE_VECTORS]), state_vector_scan_op, state_identity_vector, num_threads, stream); allocation_sizes[MEM_STATE_VECTORS] = num_threads * sizeof(StateVectorT); allocation_sizes[MEM_SCAN] = vector_scan_storage_bytes; // Bytes needed for tile status descriptors (fusing state-transition vector + DFA simulation) if constexpr (SINGLE_PASS_STV) { error = ScanTileStateT::AllocationSize(num_blocks, allocation_sizes[MEM_SINGLE_PASS_STV]); if (error != cudaSuccess) return error; } // Bytes needed for tile status descriptors (DFA simulation pass for output size computation + // output-generating pass) if constexpr (IS_FST) { error = FstScanTileStateT::AllocationSize(num_blocks, allocation_sizes[MEM_FST_OFFSET]); if (error != cudaSuccess) return error; } // Alias the temporary allocations from the single storage blob (or compute the necessary size // of the blob) error = cub::AliasTemporaries(d_temp_storage, temp_storage_bytes, allocations, allocation_sizes); if (error != cudaSuccess) return error; // Return if the caller is simply requesting the size of the storage allocation if (d_temp_storage == NULL) return cudaSuccess; // Alias memory for state-transition vectors StateVectorT* d_thread_state_transition = static_cast<StateVectorT*>(allocations[MEM_STATE_VECTORS]); //------------------------------------------------------------------------------ // INITIALIZE SCAN TILE STATES COMPUTING TRANSDUCED OUTPUT OFFSETS //------------------------------------------------------------------------------ FstScanTileStateT fst_offset_tile_state; if constexpr (IS_FST) { // Construct the tile status (aliases memory internally et al.) error = fst_offset_tile_state.Init( num_blocks, allocations[MEM_FST_OFFSET], allocation_sizes[MEM_FST_OFFSET]); if (error != cudaSuccess) return error; constexpr uint32_t FST_INIT_TPB = 256; uint32_t num_fst_init_blocks = CUB_QUOTIENT_CEILING(num_blocks, FST_INIT_TPB); initialization_pass_kernel<<<num_fst_init_blocks, FST_INIT_TPB, 0, stream>>>( fst_offset_tile_state, num_blocks); } //------------------------------------------------------------------------------ // COMPUTE STATE-TRANSITION VECTORS //------------------------------------------------------------------------------ ScanTileStateT stv_tile_state; if constexpr (SINGLE_PASS_STV) { // Construct the tile status (aliases memory internally et al.) 
error = stv_tile_state.Init( num_blocks, allocations[MEM_SINGLE_PASS_STV], allocation_sizes[MEM_SINGLE_PASS_STV]); if (error != cudaSuccess) return error; constexpr uint32_t STV_INIT_TPB = 256; uint32_t num_stv_init_blocks = CUB_QUOTIENT_CEILING(num_blocks, STV_INIT_TPB); initialization_pass_kernel<<<num_stv_init_blocks, STV_INIT_TPB, 0, stream>>>(stv_tile_state, num_blocks); } else { // Compute state-transition vectors // TODO tag dispatch or constexpr if depending on single-pass config to avoid superfluous // template instantiations ComputeStateTransitionVector<ActivePolicyT>( sm_count, stv_tile_state, fst_offset_tile_state, d_thread_state_transition); // State-transition vector scan computing using the composition operator cub::DeviceScan::ExclusiveScan(allocations[MEM_SCAN], allocation_sizes[MEM_SCAN], d_thread_state_transition, d_thread_state_transition, state_vector_scan_op, state_identity_vector, num_threads, stream); } //------------------------------------------------------------------------------ // SIMULATE DFA //------------------------------------------------------------------------------ return SimulateDFA<ActivePolicyT>( sm_count, stv_tile_state, fst_offset_tile_state, seed_state, d_thread_state_transition); } }; } // namespace cudf::io::fst::detail
0
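For intuition about the tile geometry this dispatch derives: under Policy900 each block processes BLOCK_THREADS x ITEMS_PER_THREAD = 128 x 32 = 4096 symbols, and the grid size is the rounding-up division that CUB_QUOTIENT_CEILING performs in InvokeDFASimulationKernel. A small host-only sketch of that arithmetic:

#include <cstdint>
#include <cstdio>

int main()
{
  constexpr uint32_t block_threads     = 128;  // Policy900::BLOCK_THREADS
  constexpr uint32_t items_per_thread  = 32;   // Policy900::ITEMS_PER_THREAD
  constexpr uint32_t symbols_per_block = block_threads * items_per_thread;  // 4096

  uint64_t const num_chars = 10'000'000;
  // Same rounding-up division that CUB_QUOTIENT_CEILING performs
  uint64_t const grid_size = (num_chars + symbols_per_block - 1) / symbols_per_block;
  std::printf("%llu tiles of %u symbols each\n",
              static_cast<unsigned long long>(grid_size), symbols_per_block);  // 2442 tiles
  return 0;
}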
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/in_reg_array.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/types.hpp> #include <cub/cub.cuh> #include <cstdint> namespace cudf::io::fst::detail { /** * @brief A bit-packed array of items that can be backed by registers yet can still be dynamically * addressed at runtime. The data structure is explained in greater detail in the paper <a * href="http://www.vldb.org/pvldb/vol13/p616-stehle.pdf">ParPaRaw: Massively Parallel Parsing of * Delimiter-Separated Raw Data</a>. * * @tparam NUM_ITEMS The maximum number of items this data structure is supposed to store * @tparam MAX_ITEM_VALUE The maximum value that one item can represent * @tparam BackingFragmentT The data type that holds the fragments */ template <uint32_t NUM_ITEMS, uint32_t MAX_ITEM_VALUE, typename BackingFragmentT = uint32_t> class MultiFragmentInRegArray { private: /// Minimum number of bits required to represent all values from [0, MAX_ITEM_VALUE] static constexpr uint32_t MIN_BITS_PER_ITEM = (MAX_ITEM_VALUE == 0) ? 1 : cub::Log2<(MAX_ITEM_VALUE + 1)>::VALUE; /// Number of bits that each fragment can store static constexpr uint32_t NUM_BITS_PER_FRAGMENT = sizeof(BackingFragmentT) * 8; /// The number of bits per fragment per item in the array static constexpr uint32_t AVAIL_BITS_PER_FRAG_ITEM = NUM_BITS_PER_FRAGMENT / NUM_ITEMS; /// The number of bits per item per fragment, rounded down to a power of two to avoid costly /// integer multiplication static constexpr uint32_t BITS_PER_FRAG_ITEM = 0x01U << (cub::Log2<(AVAIL_BITS_PER_FRAG_ITEM + 1)>::VALUE - 1); // The total number of fragments required to store all the items static constexpr uint32_t FRAGMENTS_PER_ITEM = cudf::util::div_rounding_up_safe(MIN_BITS_PER_ITEM, BITS_PER_FRAG_ITEM); //------------------------------------------------------------------------------ // HELPER FUNCTIONS //------------------------------------------------------------------------------ /** * @brief Returns the \p num_bits bits starting at \p bit_start */ CUDF_HOST_DEVICE [[nodiscard]] uint32_t bfe(uint32_t const& data, uint32_t bit_start, uint32_t num_bits) const { #if CUB_PTX_ARCH > 0 return cub::BFE(data, bit_start, num_bits); #else uint32_t const MASK = (1 << num_bits) - 1; return (data >> bit_start) & MASK; #endif } /** * @brief Replaces the \p num_bits bits in \p data starting from \p bit_start with the lower \p * num_bits from \p bits.
*/ CUDF_HOST_DEVICE void bfi(uint32_t& data, uint32_t bits, uint32_t bit_start, uint32_t num_bits) const { #if CUB_PTX_ARCH > 0 cub::BFI(data, data, bits, bit_start, num_bits); #else uint32_t x = bits << bit_start; uint32_t y = data; uint32_t MASK_X = ((1 << num_bits) - 1) << bit_start; uint32_t MASK_Y = ~MASK_X; data = (y & MASK_Y) | (x & MASK_X); #endif } BackingFragmentT data[FRAGMENTS_PER_ITEM]; //------------------------------------------------------------------------------ // ACCESSORS //------------------------------------------------------------------------------ public: CUDF_HOST_DEVICE [[nodiscard]] uint32_t Get(int32_t index) const { uint32_t val = 0; for (uint32_t i = 0; i < FRAGMENTS_PER_ITEM; ++i) { val = val | bfe(data[i], index * BITS_PER_FRAG_ITEM, BITS_PER_FRAG_ITEM) << (i * BITS_PER_FRAG_ITEM); } return val; } CUDF_HOST_DEVICE void Set(uint32_t index, uint32_t value) { for (uint32_t i = 0; i < FRAGMENTS_PER_ITEM; ++i) { uint32_t frag_bits = bfe(value, i * BITS_PER_FRAG_ITEM, BITS_PER_FRAG_ITEM); bfi(data[i], frag_bits, index * BITS_PER_FRAG_ITEM, BITS_PER_FRAG_ITEM); } } //------------------------------------------------------------------------------ // CONSTRUCTORS //------------------------------------------------------------------------------ CUDF_HOST_DEVICE MultiFragmentInRegArray() { for (uint32_t i = 0; i < FRAGMENTS_PER_ITEM; ++i) { data[i] = 0; } } CUDF_HOST_DEVICE MultiFragmentInRegArray(uint32_t const (&array)[NUM_ITEMS]) { for (uint32_t i = 0; i < NUM_ITEMS; ++i) { Set(i, array[i]); } } }; } // namespace cudf::io::fst::detail
0
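A host-side round-trip sketch for MultiFragmentInRegArray (its accessors are CUDF_HOST_DEVICE, so this compiles as CUDA C++ with the header available). With NUM_ITEMS = 4 and MAX_ITEM_VALUE = 7, each item needs 3 bits while a 32-bit fragment offers 8 bits per item (32 / 4, already a power of two), so a single backing fragment suffices:

#include <cassert>

int main()
{
  using cudf::io::fst::detail::MultiFragmentInRegArray;

  MultiFragmentInRegArray<4, 7> arr;  // four items, each in [0, 7]
  arr.Set(0, 5);
  arr.Set(3, 7);

  assert(arr.Get(0) == 5);
  assert(arr.Get(1) == 0);  // fragments are zero-initialized by the default constructor
  assert(arr.Get(3) == 7);
  return 0;
}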
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/agent_dfa.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "in_reg_array.cuh" #include <cub/cub.cuh> #include <thrust/execution_policy.h> #include <thrust/sequence.h> namespace cudf::io::fst::detail { /// Type used to enumerate (and index) into the states defined by a DFA using StateIndexT = uint32_t; /** * @brief Implements an associative composition operation for state transition vectors to be used * with a prefix scan. * * Read the following table as follows: c = op(l,r), where op is the composition operator. * For row 0: l maps 0 to 2. r maps 2 to 2. Hence, the result for 0 is 2. * For row 1: l maps 1 to 1. r maps 1 to 2. Hence, the result for 1 is 2. * For row 2: l maps 2 to 0. r maps 0 to 1. Hence, the result for 2 is 1. * * l r = c ( s->l->r) * 0: [2] [1] [2] (i.e. 0->2->2) * 1: [1] [2] [2] (i.e. 1->1->2) * 2: [0] [2] [1] (i.e. 2->0->1) * @tparam NUM_ITEMS The number of items stored within a vector */ template <int32_t NUM_ITEMS> struct VectorCompositeOp { template <typename VectorT> __host__ __device__ __forceinline__ VectorT operator()(VectorT const& lhs, VectorT const& rhs) { VectorT res{}; for (int32_t i = 0; i < NUM_ITEMS; ++i) { res.Set(i, rhs.Get(lhs.Get(i))); } return res; } }; /** * @brief A class whose ReadSymbol member function is invoked for each symbol being read from the * input tape. The wrapper class looks up whether a state transition caused by a symbol is supposed * to emit any output symbol (the "transduced" output) and, if so, keeps track of how many symbols * it intends to write out and writes those symbols to the given output iterators. * * @tparam TransducerTableT The type implementing a transducer table that can be used for looking up * the symbols that are supposed to be emitted on a given state transition. * @tparam TransducedOutItT A random-access output iterator type to which symbols returned by the * transducer table are assignable. * @tparam TransducedIndexOutItT A random-access output iterator type to which indexes are written.
*/ template <typename TransducerTableT, typename TransducedOutItT, typename TransducedIndexOutItT> class DFASimulationCallbackWrapper { public: __host__ __device__ __forceinline__ DFASimulationCallbackWrapper( TransducerTableT transducer_table, TransducedOutItT out_it, TransducedIndexOutItT out_idx_it) : transducer_table(transducer_table), out_it(out_it), out_idx_it(out_idx_it), write(false) { } template <typename OffsetT> __host__ __device__ __forceinline__ void Init(OffsetT const& offset) { this->offset = offset; if (!write) out_count = 0; } template <typename CharIndexT, typename StateIndexT, typename SymbolIndexT, typename SymbolT> __host__ __device__ __forceinline__ void ReadSymbol(CharIndexT const character_index, StateIndexT const old_state, StateIndexT const new_state, SymbolIndexT const symbol_id, SymbolT const read_symbol) { uint32_t const count = transducer_table(old_state, symbol_id, read_symbol); if (write) { #if __CUDA_ARCH__ > 0 #pragma unroll 1 #endif for (uint32_t out_char = 0; out_char < count; out_char++) { out_it[out_count + out_char] = transducer_table(old_state, symbol_id, out_char, read_symbol); out_idx_it[out_count + out_char] = offset + character_index; } } out_count += count; } __host__ __device__ __forceinline__ void TearDown() {} public: TransducerTableT const transducer_table; TransducedOutItT out_it; TransducedIndexOutItT out_idx_it; uint32_t out_count; uint32_t offset; bool write; }; /** * @brief Helper class that transitions the state of multiple DFA instances simultaneously whenever * a symbol is read. * * @tparam NUM_INSTANCES The number of DFA instances to keep track of * @tparam TransitionTableT The transition table type used for looking up the new state for a * current_state and a read_symbol. */ template <int32_t NUM_INSTANCES, typename TransitionTableT> class StateVectorTransitionOp { public: __host__ __device__ __forceinline__ StateVectorTransitionOp( TransitionTableT const& transition_table, std::array<StateIndexT, NUM_INSTANCES>& state_vector) : transition_table(transition_table), state_vector(state_vector) { } template <typename CharIndexT, typename SymbolIndexT, typename SymbolT> __host__ __device__ __forceinline__ void ReadSymbol(CharIndexT const& character_index, SymbolIndexT const& read_symbol_id, SymbolT const& read_symbol) const { for (int32_t i = 0; i < NUM_INSTANCES; ++i) { state_vector[i] = transition_table(state_vector[i], read_symbol_id); } } public: std::array<StateIndexT, NUM_INSTANCES>& state_vector; TransitionTableT const& transition_table; }; template <typename CallbackOpT, typename TransitionTableT> struct StateTransitionOp { StateIndexT state; TransitionTableT const& transition_table; CallbackOpT& callback_op; __host__ __device__ __forceinline__ StateTransitionOp(TransitionTableT const& transition_table, StateIndexT state, CallbackOpT& callback_op) : transition_table(transition_table), state(state), callback_op(callback_op) { } template <typename CharIndexT, typename SymbolIndexT, typename SymbolT> __host__ __device__ __forceinline__ void ReadSymbol(CharIndexT const& character_index, SymbolIndexT const& read_symbol_id, SymbolT const& read_symbol) { // Remember what state we were in before we made the transition StateIndexT previous_state = state; state = transition_table(state, read_symbol_id); callback_op.ReadSymbol(character_index, previous_state, state, read_symbol_id, read_symbol); } }; template <typename AgentDFAPolicy, typename SymbolItT, typename OffsetT> struct AgentDFA { using SymbolIndexT = uint32_t; using AliasedLoadT 
= uint32_t; using CharT = typename std::iterator_traits<SymbolItT>::value_type; //------------------------------------------------------------------------------ // DERIVED CONFIGS //------------------------------------------------------------------------------ static constexpr uint32_t BLOCK_THREADS = AgentDFAPolicy::BLOCK_THREADS; static constexpr uint32_t ITEMS_PER_THREAD = AgentDFAPolicy::ITEMS_PER_THREAD; // The number of symbols per thread static constexpr uint32_t SYMBOLS_PER_THREAD = ITEMS_PER_THREAD; static constexpr uint32_t SYMBOLS_PER_BLOCK = BLOCK_THREADS * SYMBOLS_PER_THREAD; static constexpr uint32_t MIN_UINTS_PER_BLOCK = CUB_QUOTIENT_CEILING(SYMBOLS_PER_BLOCK, sizeof(AliasedLoadT)); static constexpr uint32_t UINTS_PER_THREAD = CUB_QUOTIENT_CEILING(MIN_UINTS_PER_BLOCK, BLOCK_THREADS); static constexpr uint32_t UINTS_PER_BLOCK = UINTS_PER_THREAD * BLOCK_THREADS; static constexpr uint32_t SYMBOLS_PER_UINT_BLOCK = UINTS_PER_BLOCK * sizeof(AliasedLoadT); //------------------------------------------------------------------------------ // TYPEDEFS //------------------------------------------------------------------------------ struct _TempStorage { // For aliased loading of characters into shared memory union { CharT chars[SYMBOLS_PER_BLOCK]; AliasedLoadT uints[UINTS_PER_BLOCK]; }; }; struct TempStorage : cub::Uninitialized<_TempStorage> {}; //------------------------------------------------------------------------------ // MEMBER VARIABLES //------------------------------------------------------------------------------ _TempStorage& temp_storage; //------------------------------------------------------------------------------ // CONSTRUCTOR //------------------------------------------------------------------------------ __device__ __forceinline__ AgentDFA(TempStorage& temp_storage) : temp_storage(temp_storage.Alias()) { } template <int32_t NUM_SYMBOLS, typename SymbolMatcherT, typename CallbackOpT, int32_t IS_FULL_BLOCK> __device__ __forceinline__ static void ThreadParse(SymbolMatcherT const& symbol_matcher, CharT const* chars, SymbolIndexT const& max_num_chars, CallbackOpT callback_op, cub::Int2Type<IS_FULL_BLOCK> /*IS_FULL_BLOCK*/) { // Iterate over symbols #pragma unroll for (int32_t i = 0; i < NUM_SYMBOLS; ++i) { if (IS_FULL_BLOCK || threadIdx.x * SYMBOLS_PER_THREAD + i < max_num_chars) { auto matched_id = symbol_matcher(chars[i]); callback_op.ReadSymbol(i, matched_id, chars[i]); } } } template <int32_t NUM_SYMBOLS, typename SymbolMatcherT, typename StateTransitionOpT, int32_t IS_FULL_BLOCK> __device__ __forceinline__ void GetThreadStateTransitions( SymbolMatcherT const& symbol_matcher, CharT const* chars, SymbolIndexT const& max_num_chars, StateTransitionOpT& state_transition_op, cub::Int2Type<IS_FULL_BLOCK> /*IS_FULL_BLOCK*/) { ThreadParse<NUM_SYMBOLS>( symbol_matcher, chars, max_num_chars, state_transition_op, cub::Int2Type<IS_FULL_BLOCK>()); } //--------------------------------------------------------------------- // LOADING FULL BLOCK OF CHARACTERS, NON-ALIASED //--------------------------------------------------------------------- template <typename CharInItT> __device__ __forceinline__ void LoadBlock(CharInItT d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, cub::Int2Type<true> /*IS_FULL_BLOCK*/, cub::Int2Type<1> /*ALIGNMENT*/) { CharT thread_chars[SYMBOLS_PER_THREAD]; CharInItT d_block_symbols = d_chars + block_offset; cub::LoadDirectStriped<BLOCK_THREADS>(threadIdx.x, d_block_symbols, thread_chars); #pragma unroll for (int32_t i = 0; i < 
SYMBOLS_PER_THREAD; ++i) { temp_storage.chars[threadIdx.x + i * BLOCK_THREADS] = thread_chars[i]; } } //--------------------------------------------------------------------- // LOADING PARTIAL BLOCK OF CHARACTERS, NON-ALIASED //--------------------------------------------------------------------- template <typename CharInItT> __device__ __forceinline__ void LoadBlock(CharInItT d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, cub::Int2Type<false> /*IS_FULL_BLOCK*/, cub::Int2Type<1> /*ALIGNMENT*/) { CharT thread_chars[SYMBOLS_PER_THREAD]; if (num_total_symbols <= block_offset) return; // Last unit to be loaded is IDIV_CEIL(#SYM, SYMBOLS_PER_UNIT) OffsetT num_total_chars = num_total_symbols - block_offset; CharInItT d_block_symbols = d_chars + block_offset; cub::LoadDirectStriped<BLOCK_THREADS>( threadIdx.x, d_block_symbols, thread_chars, num_total_chars); #pragma unroll for (int32_t i = 0; i < SYMBOLS_PER_THREAD; ++i) { temp_storage.chars[threadIdx.x + i * BLOCK_THREADS] = thread_chars[i]; } } //--------------------------------------------------------------------- // LOADING FULL BLOCK OF CHARACTERS, ALIASED //--------------------------------------------------------------------- __device__ __forceinline__ void LoadBlock(CharT const* d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, cub::Int2Type<true> /*IS_FULL_BLOCK*/, cub::Int2Type<sizeof(AliasedLoadT)> /*ALIGNMENT*/) { AliasedLoadT thread_units[UINTS_PER_THREAD]; AliasedLoadT const* d_block_symbols = reinterpret_cast<AliasedLoadT const*>(d_chars + block_offset); cub::LoadDirectStriped<BLOCK_THREADS>(threadIdx.x, d_block_symbols, thread_units); #pragma unroll for (int32_t i = 0; i < UINTS_PER_THREAD; ++i) { temp_storage.uints[threadIdx.x + i * BLOCK_THREADS] = thread_units[i]; } } //--------------------------------------------------------------------- // LOADING PARTIAL BLOCK OF CHARACTERS, ALIASED //--------------------------------------------------------------------- __device__ __forceinline__ void LoadBlock(CharT const* d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, cub::Int2Type<false> /*IS_FULL_BLOCK*/, cub::Int2Type<sizeof(AliasedLoadT)> /*ALIGNMENT*/) { AliasedLoadT thread_units[UINTS_PER_THREAD]; if (num_total_symbols <= block_offset) return; // Last unit to be loaded is IDIV_CEIL(#SYM, SYMBOLS_PER_UNIT) OffsetT num_total_units = CUB_QUOTIENT_CEILING(num_total_symbols - block_offset, sizeof(AliasedLoadT)); AliasedLoadT const* d_block_symbols = reinterpret_cast<AliasedLoadT const*>(d_chars + block_offset); cub::LoadDirectStriped<BLOCK_THREADS>( threadIdx.x, d_block_symbols, thread_units, num_total_units); #pragma unroll for (int32_t i = 0; i < UINTS_PER_THREAD; ++i) { temp_storage.uints[threadIdx.x + i * BLOCK_THREADS] = thread_units[i]; } } //--------------------------------------------------------------------- // LOADING BLOCK OF CHARACTERS: DISPATCHER //--------------------------------------------------------------------- __device__ __forceinline__ void LoadBlock(CharT const* d_chars, OffsetT const block_offset, OffsetT const num_total_symbols) { // Check if pointer is aligned to four bytes if (((uintptr_t)(void const*)(d_chars + block_offset) % 4) == 0) { if (block_offset + SYMBOLS_PER_UINT_BLOCK < num_total_symbols) { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<true>(), cub::Int2Type<4>()); } else { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<false>(), cub::Int2Type<1>()); } } else { if (block_offset + 
SYMBOLS_PER_UINT_BLOCK < num_total_symbols) { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<true>(), cub::Int2Type<1>()); } else { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<false>(), cub::Int2Type<1>()); } } } template <typename CharInItT> __device__ __forceinline__ void LoadBlock(CharInItT d_chars, OffsetT const block_offset, OffsetT const num_total_symbols) { // Check if we are loading a full tile of data if (block_offset + SYMBOLS_PER_UINT_BLOCK < num_total_symbols) { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<true>(), cub::Int2Type<1>()); } else { LoadBlock( d_chars, block_offset, num_total_symbols, cub::Int2Type<false>(), cub::Int2Type<1>()); } } template <int32_t NUM_STATES, typename SymbolMatcherT, typename TransitionTableT> __device__ __forceinline__ void GetThreadStateTransitionVector( SymbolMatcherT const& symbol_matcher, TransitionTableT const& transition_table, SymbolItT d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, std::array<StateIndexT, NUM_STATES>& state_vector) { using StateVectorTransitionOpT = StateVectorTransitionOp<NUM_STATES, TransitionTableT>; // Start parsing and to transition states StateVectorTransitionOpT transition_op(transition_table, state_vector); // Load characters into shared memory LoadBlock(d_chars, block_offset, num_total_symbols); // If this is a full block (i.e., all threads can parse all their symbols) OffsetT num_block_chars = num_total_symbols - block_offset; bool is_full_block = (num_block_chars >= SYMBOLS_PER_BLOCK); // Ensure characters have been loaded __syncthreads(); // Thread's symbols CharT* t_chars = &temp_storage.chars[threadIdx.x * SYMBOLS_PER_THREAD]; // Parse thread's symbols and transition the state-vector if (is_full_block) { GetThreadStateTransitions<SYMBOLS_PER_THREAD>( symbol_matcher, t_chars, num_block_chars, transition_op, cub::Int2Type<true>()); } else { GetThreadStateTransitions<SYMBOLS_PER_THREAD>( symbol_matcher, t_chars, num_block_chars, transition_op, cub::Int2Type<false>()); } } template <int32_t BYPASS_LOAD, typename SymbolMatcherT, typename TransitionTableT, typename CallbackOpT> __device__ __forceinline__ void GetThreadStateTransitions( SymbolMatcherT const& symbol_matcher, TransitionTableT const& transition_table, SymbolItT d_chars, OffsetT const block_offset, OffsetT const num_total_symbols, StateIndexT& state, CallbackOpT& callback_op, cub::Int2Type<BYPASS_LOAD>) { using StateTransitionOpT = StateTransitionOp<CallbackOpT, TransitionTableT>; // Start parsing and to transition states StateTransitionOpT transition_op(transition_table, state, callback_op); // Load characters into shared memory if (!BYPASS_LOAD) LoadBlock(d_chars, block_offset, num_total_symbols); // If this is a full block (i.e., all threads can parse all their symbols) OffsetT num_block_chars = num_total_symbols - block_offset; bool is_full_block = (num_block_chars >= SYMBOLS_PER_BLOCK); // Ensure characters have been loaded __syncthreads(); // Thread's symbols CharT* t_chars = &temp_storage.chars[threadIdx.x * SYMBOLS_PER_THREAD]; // Initialize callback callback_op.Init(block_offset + threadIdx.x * SYMBOLS_PER_THREAD); // Parse thread's symbols and transition the state-vector if (is_full_block) { GetThreadStateTransitions<SYMBOLS_PER_THREAD>( symbol_matcher, t_chars, num_block_chars, transition_op, cub::Int2Type<true>()); } else { GetThreadStateTransitions<SYMBOLS_PER_THREAD>( symbol_matcher, t_chars, num_block_chars, transition_op, cub::Int2Type<false>()); } 
callback_op.TearDown(); } }; template <bool IS_TRANS_VECTOR_PASS, bool IS_SINGLE_PASS, typename DfaT, typename TileStateT, typename AgentDFAPolicy, typename SymbolItT, typename OffsetT, typename StateVectorT, typename OutOffsetScanTileState, typename TransducedOutItT, typename TransducedIndexOutItT, typename TransducedCountOutItT> __launch_bounds__(int32_t(AgentDFAPolicy::BLOCK_THREADS)) __global__ void SimulateDFAKernel(DfaT dfa, SymbolItT d_chars, OffsetT const num_chars, StateIndexT seed_state, StateVectorT* __restrict__ d_thread_state_transition, TileStateT tile_state, OutOffsetScanTileState offset_tile_state, TransducedOutItT transduced_out_it, TransducedIndexOutItT transduced_out_idx_it, TransducedCountOutItT d_num_transduced_out_it) { using AgentDfaSimT = AgentDFA<AgentDFAPolicy, SymbolItT, OffsetT>; static constexpr int32_t NUM_STATES = DfaT::MAX_NUM_STATES; constexpr uint32_t BLOCK_THREADS = AgentDFAPolicy::BLOCK_THREADS; constexpr uint32_t SYMBOLS_PER_BLOCK = AgentDfaSimT::SYMBOLS_PER_BLOCK; // Shared memory required by the DFA simulation algorithm __shared__ typename AgentDfaSimT::TempStorage dfa_storage; // Shared memory required by the symbol group lookup table __shared__ typename DfaT::SymbolGroupStorageT symbol_matcher_storage; // Shared memory required by the transition table __shared__ typename DfaT::TransitionTableStorageT transition_table_storage; // Shared memory required by the transducer table __shared__ typename DfaT::TranslationTableStorageT transducer_table_storage; // Initialize symbol group lookup table auto symbol_matcher = dfa.InitSymbolGroupLUT(symbol_matcher_storage); // Initialize transition table auto transition_table = dfa.InitTransitionTable(transition_table_storage); // Initialize translation table auto transducer_table = dfa.InitTranslationTable(transducer_table_storage); // Set up DFA AgentDfaSimT agent_dfa(dfa_storage); // The state transition vector passed on to the second stage of the algorithm StateVectorT out_state_vector; // Stage 1: Compute the state-transition vector if (IS_TRANS_VECTOR_PASS || IS_SINGLE_PASS) { // Keeping track of the state for each of the <NUM_STATES> state machines std::array<StateIndexT, NUM_STATES> state_vector; // Initialize the seed state transition vector with the identity vector thrust::sequence(thrust::seq, std::begin(state_vector), std::end(state_vector)); // Compute the state transition vector agent_dfa.GetThreadStateTransitionVector<NUM_STATES>(symbol_matcher, transition_table, d_chars, blockIdx.x * SYMBOLS_PER_BLOCK, num_chars, state_vector); // Initialize the state transition vector passed on to the second stage #pragma unroll for (int32_t i = 0; i < NUM_STATES; ++i) { out_state_vector.Set(i, state_vector[i]); } // Write out state-transition vector if (!IS_SINGLE_PASS) { d_thread_state_transition[blockIdx.x * BLOCK_THREADS + threadIdx.x] = out_state_vector; } } // Stage 2: Perform FSM simulation if ((!IS_TRANS_VECTOR_PASS) || IS_SINGLE_PASS) { StateIndexT state = 0; //------------------------------------------------------------------------------ // SINGLE-PASS: // -> block-wide inclusive prefix scan on the state transition vector // -> first block/tile: write out block aggregate as the "tile's" inclusive (i.e., the one that // incorporates all preceding blocks/tiles results) //------------------------------------------------------------------------------ if (IS_SINGLE_PASS) { uint32_t tile_idx = blockIdx.x; using StateVectorCompositeOpT = VectorCompositeOp<NUM_STATES>; using PrefixCallbackOpT_ =
cub::TilePrefixCallbackOp<StateVectorT, StateVectorCompositeOpT, TileStateT>; using ItemsBlockScan = cub::BlockScan<StateVectorT, BLOCK_THREADS, cub::BlockScanAlgorithm::BLOCK_SCAN_WARP_SCANS>; __shared__ typename ItemsBlockScan::TempStorage scan_temp_storage; __shared__ typename PrefixCallbackOpT_::TempStorage prefix_callback_temp_storage; // STATE-TRANSITION IDENTITY VECTOR StateVectorT state_identity_vector; for (int32_t i = 0; i < NUM_STATES; ++i) { state_identity_vector.Set(i, i); } StateVectorCompositeOpT state_vector_scan_op; // if (tile_idx == 0) { StateVectorT block_aggregate; ItemsBlockScan(scan_temp_storage) .ExclusiveScan(out_state_vector, out_state_vector, state_identity_vector, state_vector_scan_op, block_aggregate); if (threadIdx.x == 0 /*and not IS_LAST_TILE*/) { tile_state.SetInclusive(0, block_aggregate); } } else { auto prefix_op = PrefixCallbackOpT_( tile_state, prefix_callback_temp_storage, state_vector_scan_op, tile_idx); ItemsBlockScan(scan_temp_storage) .ExclusiveScan(out_state_vector, out_state_vector, state_vector_scan_op, prefix_op); } __syncthreads(); state = out_state_vector.Get(seed_state); } else { state = d_thread_state_transition[blockIdx.x * BLOCK_THREADS + threadIdx.x].Get(seed_state); } // Perform finite-state machine simulation, computing size of transduced output DFASimulationCallbackWrapper<decltype(dfa.InitTranslationTable(transducer_table_storage)), TransducedOutItT, TransducedIndexOutItT> callback_wrapper(transducer_table, transduced_out_it, transduced_out_idx_it); StateIndexT t_start_state = state; agent_dfa.GetThreadStateTransitions(symbol_matcher, transition_table, d_chars, blockIdx.x * SYMBOLS_PER_BLOCK, num_chars, state, callback_wrapper, cub::Int2Type<IS_SINGLE_PASS>()); __syncthreads(); using OffsetPrefixScanCallbackOpT_ = cub::TilePrefixCallbackOp<OffsetT, cub::Sum, OutOffsetScanTileState>; using OutOffsetBlockScan = cub::BlockScan<OffsetT, BLOCK_THREADS, cub::BlockScanAlgorithm::BLOCK_SCAN_WARP_SCANS>; __shared__ typename OutOffsetBlockScan::TempStorage scan_temp_storage; __shared__ typename OffsetPrefixScanCallbackOpT_::TempStorage prefix_callback_temp_storage; uint32_t tile_idx = blockIdx.x; if (tile_idx == 0) { OffsetT block_aggregate = 0; OutOffsetBlockScan(scan_temp_storage) .ExclusiveScan(callback_wrapper.out_count, callback_wrapper.out_count, static_cast<OffsetT>(0), cub::Sum{}, block_aggregate); if (threadIdx.x == 0 /*and not IS_LAST_TILE*/) { offset_tile_state.SetInclusive(0, block_aggregate); } if (tile_idx == gridDim.x - 1 && threadIdx.x == 0) { *d_num_transduced_out_it = block_aggregate; } } else { auto prefix_op = OffsetPrefixScanCallbackOpT_( offset_tile_state, prefix_callback_temp_storage, cub::Sum{}, tile_idx); OutOffsetBlockScan(scan_temp_storage) .ExclusiveScan( callback_wrapper.out_count, callback_wrapper.out_count, cub::Sum{}, prefix_op); if (tile_idx == gridDim.x - 1 && threadIdx.x == 0) { *d_num_transduced_out_it = prefix_op.GetInclusivePrefix(); } } callback_wrapper.write = true; agent_dfa.GetThreadStateTransitions(symbol_matcher, transition_table, d_chars, blockIdx.x * SYMBOLS_PER_BLOCK, num_chars, t_start_state, callback_wrapper, cub::Int2Type<true>()); } } } // namespace cudf::io::fst::detail
0
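The composition table in the VectorCompositeOp documentation above (l = [2,1,0], r = [1,2,2] composing to c = [2,2,1]) can be verified on the host, since both the operator and MultiFragmentInRegArray are host/device-callable. A sketch, compiled as CUDA C++:

#include <cassert>
#include <cstdint>

int main()
{
  using namespace cudf::io::fst::detail;

  uint32_t const l_map[3] = {2, 1, 0};  // l: 0->2, 1->1, 2->0
  uint32_t const r_map[3] = {1, 2, 2};  // r: 0->1, 1->2, 2->2
  MultiFragmentInRegArray<3, 2> const l(l_map);
  MultiFragmentInRegArray<3, 2> const r(r_map);

  VectorCompositeOp<3> compose;
  auto const c = compose(l, r);  // c[i] = r[l[i]]

  assert(c.Get(0) == 2);  // 0->2->2
  assert(c.Get(1) == 2);  // 1->1->2
  assert(c.Get(2) == 1);  // 2->0->1
  return 0;
}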
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/fst/device_dfa.cuh
/* * Copyright (c) 2022, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "dispatch_dfa.cuh" #include <io/utilities/hostdevice_vector.hpp> #include <cstdint> namespace cudf::io::fst { /** * @brief Uses a deterministic finite automaton to transduce a sequence of symbols from an input * iterator to a sequence of transduced output symbols. * * @tparam DfaT The DFA specification * @tparam SymbolItT Random-access input iterator type to symbols fed into the FST * @tparam TransducedOutItT Random-access output iterator to which the transduced output will be * written * @tparam TransducedIndexOutItT Random-access output iterator type to which the input symbols' * indexes are written. * @tparam TransducedCountOutItT A single-item output iterator type to which the total number of * output symbols is written * @tparam OffsetT A type large enough to index into both: (a) the input symbols and (b) * the output symbols * @param[in] d_temp_storage Device-accessible allocation of temporary storage. When NULL, the * required allocation size is written to \p temp_storage_bytes and no work is done. * @param[in,out] temp_storage_bytes Reference to size in bytes of \p d_temp_storage allocation * @param[in] dfa The DFA specifying the number of distinct symbol groups, transition table, and * translation table * @param[in] d_chars_in Random-access input iterator to the beginning of the sequence of input * symbols * @param[in] num_chars The total number of input symbols to process * @param[out] transduced_out_it Random-access output iterator to which the transduced output is * written * @param[out] transduced_out_idx_it Random-access output iterator to which the index i is written * iff the i-th input symbol caused some output to be written * @param[out] d_num_transduced_out_it A single-item output iterator type to which the total number * of output symbols is written * @param[in] seed_state The DFA's starting state. For streaming DFAs this corresponds to the * "end-state" of the previous invocation of the algorithm. * @param[in] stream CUDA stream to launch kernels within. Default is the null-stream. */ template <typename DfaT, typename SymbolItT, typename TransducedOutItT, typename TransducedIndexOutItT, typename TransducedCountOutItT, typename OffsetT> cudaError_t DeviceTransduce(void* d_temp_storage, size_t& temp_storage_bytes, DfaT dfa, SymbolItT d_chars_in, OffsetT num_chars, TransducedOutItT transduced_out_it, TransducedIndexOutItT transduced_out_idx_it, TransducedCountOutItT d_num_transduced_out_it, uint32_t seed_state = 0, cudaStream_t stream = 0) { using DispatchDfaT = detail::DispatchFSM<DfaT, SymbolItT, TransducedOutItT, TransducedIndexOutItT, TransducedCountOutItT, OffsetT>; return DispatchDfaT::Dispatch(d_temp_storage, temp_storage_bytes, dfa, seed_state, d_chars_in, num_chars, transduced_out_it, transduced_out_idx_it, d_num_transduced_out_it, stream); } } // namespace cudf::io::fst
0
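The interface above implies the usual CUB-style two-phase call: a first invocation with a null temporary-storage pointer only reports the required size. A hedged sketch; `DfaViewT` stands in for whatever Dfa::get_device_view() returns, and the output pointers are assumed to be preallocated to a sufficient size:

#include <rmm/device_buffer.hpp>

#include <cstddef>
#include <cstdint>

template <typename DfaViewT>
void run_fst(DfaViewT dfa,
             char const* d_in,
             uint32_t num_chars,
             char* d_out,
             uint32_t* d_out_idx,
             uint32_t* d_num_out,
             cudaStream_t stream)
{
  std::size_t temp_bytes = 0;
  // Phase 1: null storage pointer -> only the required allocation size is computed
  cudf::io::fst::DeviceTransduce(nullptr, temp_bytes, dfa, d_in, num_chars,
                                 d_out, d_out_idx, d_num_out, /*seed_state=*/0, stream);
  rmm::device_buffer temp{temp_bytes, stream};
  // Phase 2: the actual transduction
  cudf::io::fst::DeviceTransduce(temp.data(), temp_bytes, dfa, d_in, num_chars,
                                 d_out, d_out_idx, d_num_out, /*seed_state=*/0, stream);
}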
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/aggregate_orc_metadata.hpp
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "orc.hpp" #include <map> #include <optional> #include <vector> namespace cudf::io::orc::detail { /** * @brief Describes a column hierarchy, which may exclude some input columns. */ struct column_hierarchy { // Maps column IDs to the IDs of their children columns using nesting_map = std::map<size_type, std::vector<size_type>>; // Children IDs of each column nesting_map children; // Each element contains the columns at the given nesting level std::vector<std::vector<orc_column_meta>> levels; column_hierarchy(nesting_map child_map); auto num_levels() const { return levels.size(); } }; /** * @brief In order to support multiple input files/buffers we need to gather * the metadata across all of those inputs. This class provides a place * to aggregate that metadata from all the files. */ class aggregate_orc_metadata { using OrcStripeInfo = std::pair<StripeInformation const*, StripeFooter const*>; /** * @brief Sums up the number of rows of each source */ [[nodiscard]] int64_t calc_num_rows() const; /** * @brief Number of columns in an ORC file. */ [[nodiscard]] size_type calc_num_cols() const; /** * @brief Sums up the number of stripes of each source */ [[nodiscard]] size_type calc_num_stripes() const; public: std::vector<metadata> per_file_metadata; int64_t const num_rows; size_type const num_stripes; bool row_grp_idx_present{true}; aggregate_orc_metadata(std::vector<std::unique_ptr<datasource>> const& sources, rmm::cuda_stream_view stream); [[nodiscard]] auto const& get_schema(int schema_idx) const { return per_file_metadata[0].ff.types[schema_idx]; } auto get_col_type(int col_idx) const { return per_file_metadata[0].ff.types[col_idx]; } [[nodiscard]] auto get_num_rows() const { return num_rows; } auto get_num_cols() const { return per_file_metadata[0].get_num_columns(); } [[nodiscard]] auto get_num_stripes() const { return num_stripes; } [[nodiscard]] auto const& get_types() const { return per_file_metadata[0].ff.types; } [[nodiscard]] int get_row_index_stride() const { return per_file_metadata[0].ff.rowIndexStride; } [[nodiscard]] auto is_row_grp_idx_present() const { return row_grp_idx_present; } /** * @brief Returns the name of the given column from the given source. */ [[nodiscard]] std::string const& column_name(int const source_idx, int const column_id) const { CUDF_EXPECTS(source_idx < static_cast<int>(per_file_metadata.size()), "Out of range source_idx provided"); return per_file_metadata[source_idx].column_name(column_id); } /** * @brief Returns the full name of the given column from the given source. * * Full name includes ancestor columns' names.
*/ [[nodiscard]] std::string const& column_path(int const source_idx, int const column_id) const { CUDF_EXPECTS(source_idx <= static_cast<int>(per_file_metadata.size()), "Out of range source_idx provided"); return per_file_metadata[source_idx].column_path(column_id); } /** * @brief Selects the stripes to read, based on the row/stripe selection parameters. * * Stripes are potentially selected from multiple files. */ std::tuple<int64_t, size_type, std::vector<metadata::stripe_source_mapping>> select_stripes( std::vector<std::vector<size_type>> const& user_specified_stripes, uint64_t skip_rows, std::optional<size_type> const& num_rows, rmm::cuda_stream_view stream); /** * @brief Filters ORC file to a selection of columns, based on their paths in the file. * * Paths are in format "grandparent_col.parent_col.child_col", where the root ORC column is * omitted to match the cuDF table hierarchy. * * @param column_paths List of full column names (i.e. paths) to select from the ORC file; * `nullopt` if user did not select columns to read * @return Columns hierarchy - lists of children columns and sorted columns in each nesting level */ column_hierarchy select_columns( std::optional<std::vector<std::string>> const& column_paths) const; }; } // namespace cudf::io::orc::detail
0
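The dotted-path format that select_columns documents ("parent_col.child_col", root ORC column omitted) is the same format accepted by the public ORC reader options. A short usage sketch follows; the file name and column names are invented for illustration.

#include <cudf/io/orc.hpp>

// Reads only struct child `s.id` and top-level column `value` from an ORC
// file, exercising the path-based selection that select_columns implements.
cudf::io::table_with_metadata read_selected_columns()
{
  auto const opts =
    cudf::io::orc_reader_options::builder(cudf::io::source_info{"example.orc"})
      .columns({"s.id", "value"})
      .build();
  return cudf::io::read_orc(opts);
}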
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/orc_field_reader.hpp
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "orc.hpp" #include <string> /** * @file orc_field_reader.hpp * @brief Functors to encapsulate common functionality required to implement * ProtobufReader::read(...) functions */ namespace cudf { namespace io { namespace orc { /** * @brief Functor to run an operator for a specified field. * * The purpose of this functor is to replace a switch case. If the field in * the argument is equal to the field specified in any element of the tuple * of operators then it is run with the byte stream and field type arguments. * * If the field does not match any of the functors then skip_struct_field is * called by the ProtobufReader */ template <int index> struct FunctionSwitchImpl { template <typename... Operator> static inline void run(ProtobufReader* pbr, uint8_t const* end, int const& encoded_field_number, std::tuple<Operator...>& ops) { if (encoded_field_number == std::get<index>(ops).encoded_field_number) { std::get<index>(ops)(pbr, end); } else { FunctionSwitchImpl<index - 1>::run(pbr, end, encoded_field_number, ops); } } }; template <> struct FunctionSwitchImpl<0> { template <typename... Operator> static inline void run(ProtobufReader* pbr, uint8_t const* end, int const& encoded_field_number, std::tuple<Operator...>& ops) { if (encoded_field_number == std::get<0>(ops).encoded_field_number) { std::get<0>(ops)(pbr, end); } else { pbr->skip_struct_field(encoded_field_number & 7); } } }; /** * @brief Function to implement ProtobufReader::read based on the tuple of functors provided. * * Bytes are read from the internal metadata stream and field types are matched up against user * supplied reading functors. If they match then the corresponding values are written to references * pointed to by the functors. */ template <typename T, typename... Operator> inline void ProtobufReader::function_builder(T& s, size_t maxlen, std::tuple<Operator...>& op) { constexpr int index = std::tuple_size<std::tuple<Operator...>>::value - 1; auto* const end = std::min(m_cur + maxlen, m_end); while (m_cur < end) { auto const field = get<uint32_t>(); FunctionSwitchImpl<index>::run(this, end, field, op); } CUDF_EXPECTS(m_cur <= end, "Current pointer to metadata stream is out of bounds"); } } // namespace orc } // namespace io } // namespace cudf
0
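The FunctionSwitchImpl recursion above is a general technique: the compiler unrolls the template recursion into an if/else chain over a tuple of handlers, with the zero specialization providing the "default" branch. Here is a self-contained sketch of the same pattern outside the ORC reader, with invented handler ids and payloads.

#include <cstdio>
#include <tuple>

struct handler {
  int id;           // analogous to encoded_field_number
  void (*fn)(int);  // action to run when the id matches
  void operator()(int payload) const { fn(payload); }
};

template <int Index>
struct switch_impl {
  template <typename... Ops>
  static void run(int id, int payload, std::tuple<Ops...>& ops)
  {
    if (id == std::get<Index>(ops).id) {
      std::get<Index>(ops)(payload);
    } else {
      switch_impl<Index - 1>::run(id, payload, ops);  // recurse toward 0
    }
  }
};

template <>
struct switch_impl<0> {
  template <typename... Ops>
  static void run(int id, int payload, std::tuple<Ops...>& ops)
  {
    if (id == std::get<0>(ops).id) {
      std::get<0>(ops)(payload);
    } else {
      std::printf("unknown id %d, skipping\n", id);  // mirrors skip_struct_field
    }
  }
};

int main()
{
  auto ops = std::make_tuple(handler{1, [](int v) { std::printf("one: %d\n", v); }},
                             handler{2, [](int v) { std::printf("two: %d\n", v); }});
  constexpr int last = std::tuple_size<decltype(ops)>::value - 1;
  switch_impl<last>::run(2, 42, ops);  // prints "two: 42"
}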
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/orc_gpu.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "orc.hpp" #include <io/comp/gpuinflate.hpp> #include <io/statistics/statistics.cuh> #include <io/utilities/column_buffer.hpp> #include <cudf/detail/timezone.cuh> #include <cudf/io/orc_types.hpp> #include <cudf/io/types.hpp> #include <cudf/table/table_device_view.cuh> #include <cudf/types.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <cuco/static_map.cuh> namespace cudf { namespace io { namespace orc { namespace gpu { using cudf::detail::device_2dspan; using cudf::detail::host_2dspan; auto constexpr KEY_SENTINEL = size_type{-1}; auto constexpr VALUE_SENTINEL = size_type{-1}; using map_type = cuco::static_map<size_type, size_type>; /** * @brief The alias of `map_type::pair_atomic_type` class. * * Declare this struct by trivial subclassing instead of type aliasing so we can have forward * declaration of this struct somewhere else. */ struct slot_type : public map_type::slot_type {}; struct CompressedStreamInfo { CompressedStreamInfo() = default; explicit constexpr CompressedStreamInfo(uint8_t const* compressed_data_, size_t compressed_size_) : compressed_data(compressed_data_), uncompressed_data(nullptr), compressed_data_size(compressed_size_) { } uint8_t const* compressed_data{}; // [in] base ptr to compressed stream data uint8_t* uncompressed_data{}; // [in] base ptr to uncompressed stream data or NULL if not known yet size_t compressed_data_size{}; // [in] compressed data size for this stream device_span<uint8_t const>* dec_in_ctl{}; // [in] input buffer to decompress device_span<uint8_t>* dec_out_ctl{}; // [in] output buffer to decompress into device_span<compression_result> dec_res{}; // [in] results of decompression device_span<uint8_t const>* copy_in_ctl{}; // [out] input buffer to copy device_span<uint8_t>* copy_out_ctl{}; // [out] output buffer to copy to uint32_t num_compressed_blocks{}; // [in,out] number of entries in decctl(in), number of // compressed blocks(out) uint32_t num_uncompressed_blocks{}; // [in,out] number of entries in dec_in_ctl(in), number of // uncompressed blocks(out) uint64_t max_uncompressed_size{}; // [out] maximum uncompressed data size of stream uint32_t max_uncompressed_block_size{}; // [out] maximum uncompressed size of any block in stream }; enum StreamIndexType { CI_DATA = 0, // Primary data stream CI_DATA2, // Secondary/Length stream CI_PRESENT, // Present stream CI_DICTIONARY, // Dictionary stream CI_INDEX, // Index stream CI_NUM_STREAMS }; /** * @brief Struct to describe a single entry in the global dictionary */ struct DictionaryEntry { uint32_t pos; // Position in data stream uint32_t len; // Length in data stream }; /** * @brief Struct to describe per stripe's column information */ struct ColumnDesc { uint8_t const* streams[CI_NUM_STREAMS]; // ptr to data stream index uint32_t strm_id[CI_NUM_STREAMS]; // stream ids uint32_t strm_len[CI_NUM_STREAMS]; // stream length uint32_t* valid_map_base; // base 
pointer of valid bit map for this column void* column_data_base; // base pointer of column data uint32_t start_row; // starting row of the stripe uint32_t num_rows; // number of rows in stripe uint32_t column_num_rows; // number of rows in whole column uint32_t num_child_rows; // number of child rows if it's a list column uint32_t num_rowgroups; // number of rowgroups in the chunk uint32_t dictionary_start; // start position in global dictionary uint32_t dict_len; // length of local dictionary uint32_t null_count; // number of null values in this stripe's column uint32_t skip_count; // number of non-null values to skip uint32_t rowgroup_id; // row group position ColumnEncodingKind encoding_kind; // column encoding kind TypeKind type_kind; // column data type uint8_t dtype_len; // data type length (for types that can be mapped to different sizes) type_id dtype_id; // TODO int32_t decimal_scale; // number of fractional decimal digits for decimal type type_id timestamp_type_id; // output timestamp type id (type_id::EMPTY by default) column_validity_info parent_validity_info; // consists of parent column valid_map and null count uint32_t* parent_null_count_prefix_sums; // per-stripe prefix sums of parent column's null count }; /** * @brief Struct to describe a group of rows belonging to a column stripe */ struct RowGroup { uint32_t chunk_id; // Column chunk this entry belongs to uint32_t strm_offset[2]; // Index offset for CI_DATA and CI_DATA2 streams uint16_t run_pos[2]; // Run position for CI_DATA and CI_DATA2 uint32_t num_rows; // number of rows in rowgroup uint32_t start_row; // starting row of the rowgroup uint32_t num_child_rows; // number of rows of children in rowgroup in case of list type }; /** * @brief Struct to describe an encoder data chunk */ struct EncChunk { uint32_t start_row; // start row of this chunk uint32_t num_rows; // number of rows in this chunk uint32_t null_mask_start_row; // adjusted to multiple of 8 uint32_t null_mask_num_rows; // adjusted to multiple of 8 ColumnEncodingKind encoding_kind; // column encoding kind TypeKind type_kind; // column data type uint8_t dtype_len; // data type length int32_t scale; // scale for decimals or timestamps uint32_t* dict_index; // dictionary index from row index uint32_t* dict_data_order; // map from data to sorted data indices uint32_t* decimal_offsets; orc_column_device_view const* column; }; /** * @brief Struct to describe the streams that correspond to a single `EncChunk`. 
*/ struct encoder_chunk_streams { uint8_t* data_ptrs[CI_NUM_STREAMS]; // encoded output int32_t ids[CI_NUM_STREAMS]; // stream id; -1 if stream is not present uint32_t lengths[CI_NUM_STREAMS]; // in: max length, out: actual length }; /** * @brief Struct to describe a column stream within a stripe */ struct StripeStream { uint8_t* data_ptr; // encoded and gathered output size_t bfr_offset; // Offset of this stream in compressed buffer uint32_t stream_size; // Size of stream in bytes uint32_t first_chunk_id; // First chunk of the stripe uint32_t num_chunks; // Number of chunks in the stripe uint32_t column_id; // column index uint32_t first_block; // First compressed block uint8_t stream_type; // Stream index type uint8_t pad[3]; }; /** * @brief Struct to describe a stripe dictionary */ struct stripe_dictionary { // input device_span<slot_type> map_slots; // hash map storage uint32_t column_idx = 0; // column index size_type start_row = 0; // first row in the stripe size_type start_rowgroup = 0; // first rowgroup in the stripe size_type num_rows = 0; // number of rows in the stripe // output device_span<uint32_t> data; // index of elements in the column to include in the dictionary device_span<uint32_t> index; // index into the dictionary for each row in the column device_span<uint32_t> data_order; // map from data to sorted data indices size_type entry_count = 0; // number of entries in the dictionary size_type char_count = 0; // number of characters in the dictionary bool is_enabled = false; // true if dictionary encoding is enabled for this stripe }; /** * @brief Initializes the hash maps storage for dictionary encoding to sentinel values. * * @param dictionaries Dictionary descriptors * @param stream CUDA stream used for device memory operations and kernel launches */ void initialize_dictionary_hash_maps(device_2dspan<stripe_dictionary> dictionaries, rmm::cuda_stream_view stream); /** * @brief Populates the hash maps with unique values from the stripe. * * @param dictionaries Dictionary descriptors * @param columns Pre-order flattened device array of ORC column views * @param stream CUDA stream used for device memory operations and kernel launches */ void populate_dictionary_hash_maps(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns, rmm::cuda_stream_view stream); /** * @brief Stores the indices of the hash map entries in the dictionary data buffer. * * @param dictionaries Dictionary descriptors * @param stream CUDA stream used for device memory operations and kernel launches */ void collect_map_entries(device_2dspan<stripe_dictionary> dictionaries, rmm::cuda_stream_view stream); /** * @brief Stores the corresponding dictionary indices for each row in the column. 
* * @param dictionaries Dictionary descriptors * @param columns Pre-order flattened device array of ORC column views * @param stream CUDA stream used for device memory operations and kernel launches */ void get_dictionary_indices(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns, rmm::cuda_stream_view stream); constexpr uint32_t encode_block_size = 512; /** * @brief Launches kernel for parsing the compressed stripe data * * @param[in] strm_info List of compressed streams * @param[in] num_streams Number of compressed streams * @param[in] compression_block_size maximum size of compressed blocks (up to 16M) * @param[in] log2maxcr log2 of maximum compression ratio (used to infer max uncompressed size from * compressed size) * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void ParseCompressedStripeData(CompressedStreamInfo* strm_info, int32_t num_streams, uint32_t compression_block_size, uint32_t log2maxcr, rmm::cuda_stream_view stream); /** * @brief Launches kernel for re-assembling decompressed blocks into a single contiguous block * * @param[in] strm_info List of compressed streams * @param[in] num_streams Number of compressed streams * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void PostDecompressionReassemble(CompressedStreamInfo* strm_info, int32_t num_streams, rmm::cuda_stream_view stream); /** * @brief Launches kernel for constructing rowgroup from index streams * * @param[out] row_groups RowGroup device array [rowgroup][column] * @param[in] strm_info List of compressed streams (or NULL if uncompressed) * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] num_rowgroups Number of row groups * @param[in] rowidx_stride Row index stride * @param[in] use_base_stride Whether to use base stride obtained from meta or use the computed * value * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void ParseRowGroupIndex(RowGroup* row_groups, CompressedStreamInfo* strm_info, ColumnDesc* chunks, uint32_t num_columns, uint32_t num_stripes, uint32_t num_rowgroups, uint32_t rowidx_stride, bool use_base_stride, rmm::cuda_stream_view stream); /** * @brief Launches kernel for decoding NULLs and building string dictionary index tables * * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] global_dictionary Global dictionary device array * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] first_row Crop all rows below first_row * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void DecodeNullsAndStringDictionaries(ColumnDesc* chunks, DictionaryEntry* global_dictionary, uint32_t num_columns, uint32_t num_stripes, size_t first_row, rmm::cuda_stream_view stream); /** * @brief Launches kernel for decoding column data * * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] global_dictionary Global dictionary device array * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] first_row Crop all rows below first_row * @param[in] tz_table Timezone translation table * @param[in] tz_len Length of timezone translation table * @param[in] row_groups Optional row index data [rowgroup][column] * @param[in] num_rowgroups Number of row groups in row index data * @param[in] rowidx_stride 
Row index stride * @param[in] level Current nesting level being processed * @param[out] error_count Number of errors during decode * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void DecodeOrcColumnData(ColumnDesc* chunks, DictionaryEntry* global_dictionary, device_2dspan<RowGroup> row_groups, uint32_t num_columns, uint32_t num_stripes, size_t first_row, table_device_view tz_table, uint32_t num_rowgroups, uint32_t rowidx_stride, size_t level, size_type* error_count, rmm::cuda_stream_view stream); /** * @brief Launches kernel for encoding column data * * @param[in] chunks encoder chunk device array [column][rowgroup] * @param[in, out] streams chunk streams device array [column][rowgroup] * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void EncodeOrcColumnData(device_2dspan<EncChunk const> chunks, device_2dspan<encoder_chunk_streams> streams, rmm::cuda_stream_view stream); /** * @brief Launches kernel for encoding column dictionaries * * @param[in] stripes Stripe dictionaries device array * @param[in] columns Pre-order flattened device array of ORC column views * @param[in] chunks encoder chunk device array [column][rowgroup] * @param[in] num_string_columns Number of string columns * @param[in] num_stripes Number of stripes * @param[in,out] enc_streams chunk streams device array [column][rowgroup] * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void EncodeStripeDictionaries(stripe_dictionary const* stripes, device_span<orc_column_device_view const> columns, device_2dspan<EncChunk const> chunks, uint32_t num_string_columns, uint32_t num_stripes, device_2dspan<encoder_chunk_streams> enc_streams, rmm::cuda_stream_view stream); /** * @brief Launches kernel for compacting chunked column data prior to compression * * @param[in,out] strm_desc StripeStream device array [stripe][stream] * @param[in,out] enc_streams chunk streams device array [column][rowgroup] * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void CompactOrcDataStreams(device_2dspan<StripeStream> strm_desc, device_2dspan<encoder_chunk_streams> enc_streams, rmm::cuda_stream_view stream); /** * @brief Launches kernel(s) for compressing data streams * * @param[in] compressed_data Output compressed blocks * @param[in] num_compressed_blocks Total number of compressed blocks * @param[in] compression Type of compression * @param[in] comp_blk_size Compression block size * @param[in] max_comp_blk_size Max size of any block after compression * @param[in] comp_block_align Required alignment for compressed blocks * @param[in] collect_statistics Whether to collect compression statistics * @param[in,out] strm_desc StripeStream device array [stripe][stream] * @param[in,out] enc_streams chunk streams device array [column][rowgroup] * @param[out] comp_res Per-block compression status * @param[in] stream CUDA stream used for device memory operations and kernel launches * * @return Compression statistics (if requested) */ std::optional<writer_compression_statistics> CompressOrcDataStreams( device_span<uint8_t> compressed_data, uint32_t num_compressed_blocks, CompressionKind compression, uint32_t comp_blk_size, uint32_t max_comp_blk_size, uint32_t comp_block_align, bool collect_statistics, device_2dspan<StripeStream> strm_desc, device_2dspan<encoder_chunk_streams> enc_streams, device_span<compression_result> comp_res, rmm::cuda_stream_view stream); /** * @brief Counts the number of characters 
in each rowgroup of each string column. * * @param counts Output array of character counts [column][rowgroup] * @param orc_columns Pre-order flattened device array of ORC column views * @param rowgroup_bounds Ranges of rows in each rowgroup [rowgroup][column] * @param str_col_indexes Indexes of string columns in orc_columns * @param stream CUDA stream used for device memory operations and kernel launches */ void rowgroup_char_counts(device_2dspan<size_type> counts, device_span<orc_column_device_view const> orc_columns, device_2dspan<rowgroup_rows const> rowgroup_bounds, device_span<uint32_t const> str_col_indexes, rmm::cuda_stream_view stream); /** * @brief Converts sizes of decimal elements to offsets within the rowgroup. * * @note The conversion is done in-place. After the conversion, the device vectors in \p elem_sizes * hold the offsets. * * @param rg_bounds Ranges of rows in each rowgroup [rowgroup][column] * @param elem_sizes Map between column indexes and decimal element sizes * @param stream CUDA stream used for device memory operations and kernel launches */ void decimal_sizes_to_offsets(device_2dspan<rowgroup_rows const> rg_bounds, std::map<uint32_t, rmm::device_uvector<uint32_t>>& elem_sizes, rmm::cuda_stream_view stream); /** * @brief Launches kernels to initialize statistics collection * * @param[out] groups Statistics groups (rowgroup-level) * @param[in] cols Column descriptors * @param[in] rowgroup_bounds Ranges of rows in each rowgroup [rowgroup][column] * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void orc_init_statistics_groups(statistics_group* groups, stats_column_desc const* cols, device_2dspan<rowgroup_rows const> rowgroup_bounds, rmm::cuda_stream_view stream); /** * @brief Launches kernels to return statistics buffer offsets and sizes * * @param[in,out] groups Statistics merge groups * @param[in] chunks Statistics chunks * @param[in] statistics_count Number of statistics buffers to encode * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void orc_init_statistics_buffersize(statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count, rmm::cuda_stream_view stream); /** * @brief Launches kernel to encode statistics in ORC protobuf format * * @param[out] blob_bfr Output buffer for statistics blobs * @param[in,out] groups Statistics merge groups * @param[in,out] chunks Statistics data * @param[in] statistics_count Number of statistics buffers * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void orc_encode_statistics(uint8_t* blob_bfr, statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count, rmm::cuda_stream_view stream); /** * @brief Number of set bits in pushdown masks, per rowgroup. * * @param[in] orc_columns Pre-order flattened device array of ORC column views * @param[in] rowgroup_bounds Ranges of rows in each rowgroup [rowgroup][column] * @param[out] set_counts Per rowgroup number of set bits * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void reduce_pushdown_masks(device_span<orc_column_device_view const> orc_columns, device_2dspan<rowgroup_rows const> rowgroup_bounds, device_2dspan<cudf::size_type> set_counts, rmm::cuda_stream_view stream); } // namespace gpu } // namespace orc } // namespace io } // namespace cudf
0
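Most of the kernel interfaces above take device_2dspan arguments with a documented [rowgroup][column] (or [stripe][column]) layout: a row-major view where the first operator[] selects a row and the second selects the element. A minimal standalone stand-in illustrating that indexing convention (not cudf's actual device_2dspan implementation) follows.

#include <cstddef>
#include <vector>

template <typename T>
struct simple_2dspan {
  T* data;
  std::size_t rows;  // first index, e.g. rowgroups
  std::size_t cols;  // second index, e.g. columns
  // operator[] returns the start of one row, so span[rg][col] addresses the
  // element for a single (rowgroup, column) pair, matching the
  // [rowgroup][column] layout documented for the kernels above.
  T* operator[](std::size_t row) const { return data + row * cols; }
  std::size_t count() const { return rows * cols; }  // total element count
};

int main()
{
  std::vector<int> storage(3 * 4, 0);  // 3 rowgroups x 4 columns, row-major
  simple_2dspan<int> bounds{storage.data(), 3, 4};
  bounds[2][1] = 7;  // rowgroup 2, column 1
  return bounds[2][1] == 7 ? 0 : 1;
}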
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/dict_enc.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc_gpu.hpp" #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/io/orc_types.hpp> #include <cudf/table/experimental/row_operators.cuh> #include <rmm/cuda_stream_view.hpp> namespace cudf::io::orc::gpu { /** * @brief Counts the number of characters in each rowgroup of each string column. */ __global__ void rowgroup_char_counts_kernel(device_2dspan<size_type> char_counts, device_span<orc_column_device_view const> orc_columns, device_2dspan<rowgroup_rows const> rowgroup_bounds, device_span<uint32_t const> str_col_indexes) { // Index of the column in the `str_col_indexes` array auto const str_col_idx = blockIdx.y; // Index of the column in the `orc_columns` array auto const col_idx = str_col_indexes[str_col_idx]; auto const row_group_idx = blockIdx.x * blockDim.x + threadIdx.x; if (row_group_idx >= rowgroup_bounds.size().first) { return; } auto const& str_col = orc_columns[col_idx]; auto const start_row = rowgroup_bounds[row_group_idx][col_idx].begin + str_col.offset(); auto const num_rows = rowgroup_bounds[row_group_idx][col_idx].size(); auto const& offsets = str_col.child(strings_column_view::offsets_column_index); char_counts[str_col_idx][row_group_idx] = (num_rows == 0) ? 
0 : offsets.element<size_type>(start_row + num_rows) - offsets.element<size_type>(start_row); } void rowgroup_char_counts(device_2dspan<size_type> counts, device_span<orc_column_device_view const> orc_columns, device_2dspan<rowgroup_rows const> rowgroup_bounds, device_span<uint32_t const> str_col_indexes, rmm::cuda_stream_view stream) { if (rowgroup_bounds.count() == 0) { return; } auto const num_rowgroups = rowgroup_bounds.size().first; auto const num_str_cols = str_col_indexes.size(); if (num_str_cols == 0) { return; } int block_size = 0; // suggested thread count to use int min_grid_size = 0; // minimum block count required CUDF_CUDA_TRY( cudaOccupancyMaxPotentialBlockSize(&min_grid_size, &block_size, rowgroup_char_counts_kernel)); auto const grid_size = dim3(cudf::util::div_rounding_up_unsafe<unsigned int>(num_rowgroups, block_size), static_cast<unsigned int>(num_str_cols)); rowgroup_char_counts_kernel<<<grid_size, block_size, 0, stream.value()>>>( counts, orc_columns, rowgroup_bounds, str_col_indexes); } template <int block_size> __global__ void __launch_bounds__(block_size) initialize_dictionary_hash_maps_kernel(device_span<stripe_dictionary> dictionaries) { auto const dict_map = dictionaries[blockIdx.x].map_slots; auto const t = threadIdx.x; for (size_type i = 0; i < dict_map.size(); i += block_size) { if (t + i < dict_map.size()) { new (&dict_map[t + i].first) map_type::atomic_key_type{KEY_SENTINEL}; new (&dict_map[t + i].second) map_type::atomic_mapped_type{VALUE_SENTINEL}; } } } struct equality_functor { column_device_view const& col; __device__ bool operator()(size_type lhs_idx, size_type rhs_idx) const { // We don't call this for nulls so this is fine auto const equal = cudf::experimental::row::equality::nan_equal_physical_equality_comparator{}; return equal(col.element<string_view>(lhs_idx), col.element<string_view>(rhs_idx)); } }; struct hash_functor { column_device_view const& col; __device__ auto operator()(size_type idx) const { return cudf::hashing::detail::MurmurHash3_x86_32<string_view>{}(col.element<string_view>(idx)); } }; template <int block_size> __global__ void __launch_bounds__(block_size) populate_dictionary_hash_maps_kernel(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns) { auto const col_idx = blockIdx.x; auto const stripe_idx = blockIdx.y; auto const t = threadIdx.x; auto& dict = dictionaries[col_idx][stripe_idx]; auto const& col = columns[dict.column_idx]; // Make a view of the hash map auto hash_map_mutable = map_type::device_mutable_view(dict.map_slots.data(), dict.map_slots.size(), cuco::empty_key{KEY_SENTINEL}, cuco::empty_value{VALUE_SENTINEL}); auto const hash_fn = hash_functor{col}; auto const equality_fn = equality_functor{col}; auto const start_row = dict.start_row; auto const end_row = dict.start_row + dict.num_rows; size_type entry_count{0}; size_type char_count{0}; // all threads should loop the same number of times for (thread_index_type cur_row = start_row + t; cur_row - t < end_row; cur_row += block_size) { auto const is_valid = cur_row < end_row and col.is_valid(cur_row); if (is_valid) { // insert element at cur_row to hash map and count successful insertions auto const is_unique = hash_map_mutable.insert(std::pair(cur_row, cur_row), hash_fn, equality_fn); if (is_unique) { ++entry_count; char_count += col.element<string_view>(cur_row).size_bytes(); } } // ensure that threads access adjacent rows in each iteration __syncthreads(); } using block_reduce = cub::BlockReduce<size_type, block_size>; 
__shared__ typename block_reduce::TempStorage reduce_storage; auto const block_entry_count = block_reduce(reduce_storage).Sum(entry_count); __syncthreads(); auto const block_char_count = block_reduce(reduce_storage).Sum(char_count); if (t == 0) { dict.entry_count = block_entry_count; dict.char_count = block_char_count; } } template <int block_size> __global__ void __launch_bounds__(block_size) collect_map_entries_kernel(device_2dspan<stripe_dictionary> dictionaries) { auto const col_idx = blockIdx.x; auto const stripe_idx = blockIdx.y; auto const& dict = dictionaries[col_idx][stripe_idx]; if (not dict.is_enabled) { return; } auto const t = threadIdx.x; auto map = map_type::device_view(dict.map_slots.data(), dict.map_slots.size(), cuco::empty_key{KEY_SENTINEL}, cuco::empty_value{VALUE_SENTINEL}); __shared__ cuda::atomic<size_type, cuda::thread_scope_block> counter; using cuda::std::memory_order_relaxed; if (t == 0) { new (&counter) cuda::atomic<size_type, cuda::thread_scope_block>{0}; } __syncthreads(); for (size_type i = 0; i < dict.map_slots.size(); i += block_size) { if (t + i < dict.map_slots.size()) { auto* slot = reinterpret_cast<map_type::value_type*>(map.begin_slot() + t + i); auto key = slot->first; if (key != KEY_SENTINEL) { auto loc = counter.fetch_add(1, memory_order_relaxed); dict.data[loc] = key; slot->second = loc; } } } } template <int block_size> __global__ void __launch_bounds__(block_size) get_dictionary_indices_kernel(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns) { auto const col_idx = blockIdx.x; auto const stripe_idx = blockIdx.y; auto const& dict = dictionaries[col_idx][stripe_idx]; auto const& col = columns[dict.column_idx]; if (not dict.is_enabled) { return; } auto const t = threadIdx.x; auto const start_row = dict.start_row; auto const end_row = dict.start_row + dict.num_rows; auto const map = map_type::device_view(dict.map_slots.data(), dict.map_slots.size(), cuco::empty_key{KEY_SENTINEL}, cuco::empty_value{VALUE_SENTINEL}); thread_index_type cur_row = start_row + t; while (cur_row < end_row) { if (col.is_valid(cur_row)) { auto const hash_fn = hash_functor{col}; auto const equality_fn = equality_functor{col}; auto const found_slot = map.find(cur_row, hash_fn, equality_fn); cudf_assert(found_slot != map.end() && "Unable to find value in map in dictionary index construction"); if (found_slot != map.end()) { // No need for atomic as this is not going to be modified by any other thread auto const val_ptr = reinterpret_cast<map_type::mapped_type const*>(&found_slot->second); dict.index[cur_row] = *val_ptr; } } cur_row += block_size; } } void initialize_dictionary_hash_maps(device_2dspan<stripe_dictionary> dictionaries, rmm::cuda_stream_view stream) { if (dictionaries.count() == 0) { return; } constexpr int block_size = 1024; initialize_dictionary_hash_maps_kernel<block_size> <<<dictionaries.count(), block_size, 0, stream.value()>>>(dictionaries.flat_view()); } void populate_dictionary_hash_maps(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns, rmm::cuda_stream_view stream) { if (dictionaries.count() == 0) { return; } constexpr int block_size = 256; dim3 const dim_grid(dictionaries.size().first, dictionaries.size().second); populate_dictionary_hash_maps_kernel<block_size> <<<dim_grid, block_size, 0, stream.value()>>>(dictionaries, columns); } void collect_map_entries(device_2dspan<stripe_dictionary> dictionaries, rmm::cuda_stream_view stream) { if (dictionaries.count() == 
0) { return; } constexpr int block_size = 1024; dim3 const dim_grid(dictionaries.size().first, dictionaries.size().second); collect_map_entries_kernel<block_size><<<dim_grid, block_size, 0, stream.value()>>>(dictionaries); } void get_dictionary_indices(device_2dspan<stripe_dictionary> dictionaries, device_span<orc_column_device_view const> columns, rmm::cuda_stream_view stream) { if (dictionaries.count() == 0) { return; } constexpr int block_size = 1024; dim3 const dim_grid(dictionaries.size().first, dictionaries.size().second); get_dictionary_indices_kernel<block_size> <<<dim_grid, block_size, 0, stream.value()>>>(dictionaries, columns); } } // namespace cudf::io::orc::gpu
0
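The kernels in dict_enc.cu split stripe dictionary construction into three phases: populate the hash map with unique row keys while counting entries and characters, compact the surviving map slots into the dictionary data array, then look up each row's dictionary index. A host-side sketch of the same algorithm, with std::unordered_map standing in for cuco::static_map (nulls, which the device code skips, are ignored here for brevity):

#include <cstddef>
#include <string>
#include <unordered_map>
#include <vector>

struct host_stripe_dictionary {
  std::vector<std::string> data;   // unique values, in insertion order
  std::vector<std::size_t> index;  // per-row dictionary index
  std::size_t char_count = 0;      // total characters in the dictionary
};

host_stripe_dictionary build_dictionary(std::vector<std::string> const& rows)
{
  host_stripe_dictionary dict;
  dict.index.resize(rows.size());
  std::unordered_map<std::string, std::size_t> map;  // value -> dictionary slot
  for (std::size_t row = 0; row < rows.size(); ++row) {
    auto const [it, inserted] = map.emplace(rows[row], dict.data.size());
    if (inserted) {  // unique value: grows entry_count/char_count, as in the populate kernel
      dict.data.push_back(rows[row]);
      dict.char_count += rows[row].size();
    }
    dict.index[row] = it->second;  // mirrors get_dictionary_indices
  }
  return dict;
}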
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/orc_field_writer.hpp
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "orc.hpp" #include <numeric> #include <string> /** * @file orc_field_writer.hpp * @brief Struct to encapsulate common functionality required to implement * `ProtobufWriter::write(...)` functions */ namespace cudf { namespace io { namespace orc { struct ProtobufWriter::ProtobufFieldWriter { int struct_size; ProtobufWriter* p; ProtobufFieldWriter(ProtobufWriter* pbw) : struct_size(0), p(pbw) {} /** * @brief Function to write an unsigned integer to the internal buffer */ template <typename T> void field_uint(int field, T const& value) { struct_size += p->put_uint(encode_field_number<T>(field)); struct_size += p->put_uint(static_cast<uint64_t>(value)); } /** * @brief Function to write a vector of unsigned integers to the internal * buffer */ template <typename T> void field_packed_uint(int field, std::vector<T> const& value) { struct_size += p->put_uint(encode_field_number<std::vector<T>>(field)); auto lpos = p->m_buff.size(); p->put_byte(0); auto sz = std::accumulate(value.begin(), value.end(), size_t{0}, [p = this->p](size_t sum, auto val) { return sum + p->put_uint(val); }); struct_size += sz + 1; for (; sz > 0x7f; sz >>= 7, struct_size++) p->m_buff.insert(p->m_buff.begin() + (lpos++), static_cast<uint8_t>((sz & 0x7f) | 0x80)); (p->m_buff)[lpos] = static_cast<uint8_t>(sz); } /** * @brief Function to write a blob to the internal buffer */ template <typename T> void field_blob(int field, T const& values) { struct_size += p->put_uint(encode_field_number<T>(field)); struct_size += p->put_uint(values.size()); struct_size += p->put_bytes<typename T::value_type>(values); } /** * @brief Function to write a struct to the internal buffer */ template <typename T> void field_struct(int field, T const& value) { struct_size += p->put_uint(encode_field_number(field, ProtofType::FIXEDLEN)); auto lpos = p->m_buff.size(); p->put_byte(0); auto sz = p->write(value); struct_size += sz + 1; for (; sz > 0x7f; sz >>= 7, struct_size++) p->m_buff.insert(p->m_buff.begin() + (lpos++), static_cast<uint8_t>((sz & 0x7f) | 0x80)); (p->m_buff)[lpos] = static_cast<uint8_t>(sz); } /** * @brief Function to write a vector of strings to the internal buffer */ void field_repeated_string(int field, std::vector<std::string> const& value) { for (auto const& elem : value) field_blob(field, elem); } /** * @brief Function to write a vector of structs to the internal buffer */ template <typename T> void field_repeated_struct(int field, std::vector<T> const& value) { for (auto const& elem : value) field_struct(field, elem); } /** * @brief Function to write a vector of struct blobs to the internal * buffer */ template <typename T> void field_repeated_struct_blob(int field, std::vector<T> const& value) { for (auto const& elem : value) field_blob(field, elem); } /** * @brief Returns the total length of the buffer written */ size_t value() { return struct_size; } }; } // namespace orc } // namespace io } // namespace cudf
0
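The field writers above all reduce to protobuf's base-128 varint primitive: put_uint emits seven payload bits per byte with the high bit marking continuation, and field_struct/field_packed_uint backpatch the length prefix by inserting extra bytes when the serialized size needs more than one varint byte. A standalone sketch of the varint encoding itself (an illustration, not cudf's ProtobufWriter):

#include <cstdint>
#include <vector>

// Appends `v` to `buf` as a base-128 varint and returns the number of bytes
// written, which is what the field writers accumulate into struct_size.
inline int put_varint(std::vector<uint8_t>& buf, uint64_t v)
{
  int len = 1;
  while (v > 0x7f) {
    buf.push_back(static_cast<uint8_t>((v & 0x7f) | 0x80));  // continuation bit set
    v >>= 7;
    ++len;
  }
  buf.push_back(static_cast<uint8_t>(v));  // final byte, high bit clear
  return len;
}

// Example: 300 encodes as {0xac, 0x02}: (300 & 0x7f) | 0x80, then 300 >> 7.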
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/writer_impl.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file writer_impl.cu * @brief cuDF-IO ORC writer class implementation */ #include "writer_impl.hpp" #include <io/comp/nvcomp_adapter.hpp> #include <io/statistics/column_statistics.cuh> #include <io/utilities/column_utils.cuh> #include <cudf/detail/iterator.cuh> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/detail/utilities/pinned_host_vector.hpp> #include <cudf/detail/utilities/stream_pool.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/strings/strings_column_view.hpp> #include <cudf/utilities/bit.hpp> #include <cudf/utilities/span.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_buffer.hpp> #include <rmm/device_uvector.hpp> #include <thrust/execution_policy.h> #include <thrust/extrema.h> #include <thrust/for_each.h> #include <thrust/functional.h> #include <thrust/host_vector.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/iterator/reverse_iterator.h> #include <thrust/iterator/transform_iterator.h> #include <thrust/optional.h> #include <thrust/pair.h> #include <thrust/reduce.h> #include <thrust/scan.h> #include <thrust/sequence.h> #include <thrust/sort.h> #include <thrust/tabulate.h> #include <thrust/transform.h> #include <cooperative_groups.h> #include <cooperative_groups/memcpy_async.h> #include <cuda/std/climits> #include <cuda/std/limits> #include <algorithm> #include <cstring> #include <numeric> #include <tuple> #include <utility> namespace cudf { namespace io { namespace detail { namespace orc { using namespace cudf::io::orc; using namespace cudf::io; template <typename T> [[nodiscard]] constexpr int varint_size(T val) { auto len = 1u; while (val > 0x7f) { val >>= 7; ++len; } return len; } struct row_group_index_info { int32_t pos = -1; // Position int32_t blk_pos = -1; // Block Position int32_t comp_pos = -1; // Compressed Position int32_t comp_size = -1; // Compressed size }; namespace { /** * @brief Translates ORC compression to nvCOMP compression */ auto to_nvcomp_compression_type(CompressionKind compression_kind) { if (compression_kind == SNAPPY) return nvcomp::compression_type::SNAPPY; if (compression_kind == ZLIB) return nvcomp::compression_type::DEFLATE; if (compression_kind == ZSTD) return nvcomp::compression_type::ZSTD; CUDF_FAIL("Unsupported compression type"); } /** * @brief Translates cuDF compression to ORC compression */ orc::CompressionKind to_orc_compression(compression_type compression) { switch (compression) { case compression_type::AUTO: case compression_type::SNAPPY: return orc::CompressionKind::SNAPPY; case compression_type::ZLIB: return orc::CompressionKind::ZLIB; case compression_type::ZSTD: return orc::CompressionKind::ZSTD; case compression_type::NONE: return orc::CompressionKind::NONE; default: CUDF_FAIL("Unsupported compression type"); } } /** * @brief Returns the block size for a given compression kind. 
*/ constexpr size_t compression_block_size(orc::CompressionKind compression) { if (compression == orc::CompressionKind::NONE) { return 0; } auto const ncomp_type = to_nvcomp_compression_type(compression); auto const nvcomp_limit = nvcomp::is_compression_disabled(ncomp_type) ? std::nullopt : nvcomp::compress_max_allowed_chunk_size(ncomp_type); constexpr size_t max_block_size = 256 * 1024; return std::min(nvcomp_limit.value_or(max_block_size), max_block_size); } /** * @brief Translates cuDF dtype to ORC datatype */ constexpr orc::TypeKind to_orc_type(cudf::type_id id, bool list_column_as_map) { switch (id) { case cudf::type_id::INT8: return TypeKind::BYTE; case cudf::type_id::INT16: return TypeKind::SHORT; case cudf::type_id::INT32: return TypeKind::INT; case cudf::type_id::INT64: return TypeKind::LONG; case cudf::type_id::FLOAT32: return TypeKind::FLOAT; case cudf::type_id::FLOAT64: return TypeKind::DOUBLE; case cudf::type_id::BOOL8: return TypeKind::BOOLEAN; case cudf::type_id::TIMESTAMP_DAYS: return TypeKind::DATE; case cudf::type_id::TIMESTAMP_SECONDS: case cudf::type_id::TIMESTAMP_MICROSECONDS: case cudf::type_id::TIMESTAMP_MILLISECONDS: case cudf::type_id::TIMESTAMP_NANOSECONDS: return TypeKind::TIMESTAMP; case cudf::type_id::STRING: return TypeKind::STRING; case cudf::type_id::DECIMAL32: case cudf::type_id::DECIMAL64: case cudf::type_id::DECIMAL128: return TypeKind::DECIMAL; case cudf::type_id::LIST: return list_column_as_map ? TypeKind::MAP : TypeKind::LIST; case cudf::type_id::STRUCT: return TypeKind::STRUCT; default: return TypeKind::INVALID_TYPE_KIND; } } /** * @brief Translates time unit to nanoscale multiple. */ constexpr int32_t to_clockscale(cudf::type_id timestamp_id) { switch (timestamp_id) { case cudf::type_id::TIMESTAMP_SECONDS: return 9; case cudf::type_id::TIMESTAMP_MILLISECONDS: return 6; case cudf::type_id::TIMESTAMP_MICROSECONDS: return 3; case cudf::type_id::TIMESTAMP_NANOSECONDS: default: return 0; } } /** * @brief Returns the precision of the given decimal type. */ constexpr auto orc_precision(cudf::type_id decimal_id) { using namespace numeric; switch (decimal_id) { case cudf::type_id::DECIMAL32: return cuda::std::numeric_limits<decimal32::rep>::digits10; case cudf::type_id::DECIMAL64: return cuda::std::numeric_limits<decimal64::rep>::digits10; case cudf::type_id::DECIMAL128: return cuda::std::numeric_limits<decimal128::rep>::digits10; default: return 0; } } } // namespace /** * @brief Helper class that adds ORC-specific column info */ class orc_column_view { public: /** * @brief Constructor that extracts out the string position + length pairs * for building dictionaries for string columns */ explicit orc_column_view(uint32_t index, int str_idx, orc_column_view* parent, column_view const& col, column_in_metadata const& metadata) : cudf_column{col}, _index{index}, _str_idx{str_idx}, _is_child{parent != nullptr}, _type_width{cudf::is_fixed_width(col.type()) ? cudf::size_of(col.type()) : 0}, _type_kind{to_orc_type(col.type().id(), metadata.is_map())}, _scale{(_type_kind == TypeKind::DECIMAL) ? -col.type().scale() : to_clockscale(col.type().id())}, _precision{metadata.is_decimal_precision_set() ? 
metadata.get_decimal_precision() : orc_precision(col.type().id())}, name{metadata.get_name()} { if (metadata.is_nullability_defined()) { nullable_from_metadata = metadata.nullable(); } if (parent != nullptr) { parent->add_child(_index); _parent_index = parent->index(); } if (_type_kind == TypeKind::MAP) { auto const struct_col = col.child(lists_column_view::child_column_index); CUDF_EXPECTS(struct_col.null_count() == 0, "struct column of a MAP column should not have null elements"); CUDF_EXPECTS(struct_col.num_children() == 2, "MAP column must have two child columns"); } } void add_child(uint32_t child_idx) { children.emplace_back(child_idx); } auto type() const noexcept { return cudf_column.type(); } auto is_string() const noexcept { return cudf_column.type().id() == type_id::STRING; } void attach_rowgroup_char_counts(host_span<size_type const> counts) { rowgroup_char_counts = counts; } [[nodiscard]] auto rowgroup_char_count(size_type rg_idx) const { return rowgroup_char_counts[rg_idx]; } [[nodiscard]] auto char_count() const { return std::accumulate(rowgroup_char_counts.begin(), rowgroup_char_counts.end(), size_type{0}); } [[nodiscard]] auto const& decimal_offsets() const { return d_decimal_offsets; } void attach_decimal_offsets(uint32_t* sizes_ptr) { d_decimal_offsets = sizes_ptr; } void attach_stripe_dicts(host_span<gpu::stripe_dictionary const> host_stripe_dicts, device_span<gpu::stripe_dictionary const> dev_stripe_dicts) { stripe_dicts = host_stripe_dicts; d_stripe_dicts = dev_stripe_dicts; } [[nodiscard]] auto const& host_stripe_dict(size_t stripe) const { CUDF_EXPECTS(is_string(), "Stripe dictionary is only present in string columns."); return stripe_dicts[stripe]; } [[nodiscard]] auto const& device_stripe_dicts() const noexcept { return d_stripe_dicts; } // Index in the table [[nodiscard]] uint32_t index() const noexcept { return _index; } // Index in the table, including only string columns [[nodiscard]] uint32_t str_index() const noexcept { return _str_idx; } // Id in the ORC file [[nodiscard]] auto id() const noexcept { return _index + 1; } [[nodiscard]] auto is_child() const noexcept { return _is_child; } auto parent_index() const noexcept { return _parent_index.value(); } auto child_begin() const noexcept { return children.cbegin(); } auto child_end() const noexcept { return children.cend(); } auto num_children() const noexcept { return children.size(); } [[nodiscard]] auto type_width() const noexcept { return _type_width; } auto size() const noexcept { return cudf_column.size(); } auto null_count() const noexcept { return cudf_column.null_count(); } auto null_mask() const noexcept { return cudf_column.null_mask(); } [[nodiscard]] bool nullable() const noexcept { return null_mask() != nullptr; } auto user_defined_nullable() const noexcept { return nullable_from_metadata; } [[nodiscard]] auto scale() const noexcept { return _scale; } [[nodiscard]] auto precision() const noexcept { return _precision; } void set_orc_encoding(ColumnEncodingKind e) noexcept { _encoding_kind = e; } [[nodiscard]] auto orc_kind() const noexcept { return _type_kind; } [[nodiscard]] auto orc_encoding() const noexcept { return _encoding_kind; } [[nodiscard]] std::string_view orc_name() const noexcept { return name; } private: column_view cudf_column; // Identifier within the set of columns uint32_t _index = 0; // Identifier within the set of string columns int _str_idx; bool _is_child = false; // ORC-related members TypeKind _type_kind = INVALID_TYPE_KIND; ColumnEncodingKind _encoding_kind = 
INVALID_ENCODING_KIND; std::string name; size_t _type_width = 0; int32_t _scale = 0; int32_t _precision = 0; host_span<size_type const> rowgroup_char_counts; host_span<gpu::stripe_dictionary const> stripe_dicts; device_span<gpu::stripe_dictionary const> d_stripe_dicts; // Offsets for encoded decimal elements. Used to enable direct writing of encoded decimal elements // into the output stream. uint32_t* d_decimal_offsets = nullptr; std::optional<bool> nullable_from_metadata; std::vector<uint32_t> children; std::optional<uint32_t> _parent_index; }; size_type orc_table_view::num_rows() const noexcept { return columns.empty() ? 0 : columns.front().size(); } namespace { struct string_length_functor { __device__ inline size_type operator()(int const i) const { // we translate from 0 -> num_chunks * 2 because each statistic has a min and max // string and we need to calculate lengths for both. if (i >= num_chunks * 2) return 0; // min strings are even values, max strings are odd values of i auto const should_copy_min = i % 2 == 0; // index of the chunk auto const idx = i / 2; auto& str_val = should_copy_min ? stripe_stat_chunks[idx].min_value.str_val : stripe_stat_chunks[idx].max_value.str_val; auto const str = stripe_stat_merge[idx].stats_dtype == dtype_string; return str ? str_val.length : 0; } int const num_chunks; statistics_chunk const* stripe_stat_chunks; statistics_merge_group const* stripe_stat_merge; }; __global__ void copy_string_data(char* string_pool, size_type* offsets, statistics_chunk* chunks, statistics_merge_group const* groups) { auto const idx = blockIdx.x / 2; if (groups[idx].stats_dtype == dtype_string) { // min strings are even values, max strings are odd values of i auto const should_copy_min = blockIdx.x % 2 == 0; auto& str_val = should_copy_min ? chunks[idx].min_value.str_val : chunks[idx].max_value.str_val; auto dst = &string_pool[offsets[blockIdx.x]]; auto src = str_val.ptr; for (thread_index_type i = threadIdx.x; i < str_val.length; i += blockDim.x) { dst[i] = src[i]; } if (threadIdx.x == 0) { str_val.ptr = dst; } } } } // namespace void persisted_statistics::persist(int num_table_rows, single_write_mode write_mode, intermediate_statistics&& intermediate_stats, rmm::cuda_stream_view stream) { if (write_mode == single_write_mode::NO) { // persist the strings in the chunks into a string pool and update pointers auto const num_chunks = static_cast<int>(intermediate_stats.stripe_stat_chunks.size()); // min offset and max offset + 1 for total size rmm::device_uvector<size_type> offsets((num_chunks * 2) + 1, stream); auto iter = cudf::detail::make_counting_transform_iterator( 0, string_length_functor{num_chunks, intermediate_stats.stripe_stat_chunks.data(), intermediate_stats.stripe_stat_merge.device_ptr()}); thrust::exclusive_scan(rmm::exec_policy(stream), iter, iter + offsets.size(), offsets.begin()); // pull size back to host auto const total_string_pool_size = offsets.element(num_chunks * 2, stream); if (total_string_pool_size > 0) { rmm::device_uvector<char> string_pool(total_string_pool_size, stream); // offsets describes where in the string pool each string goes. Going with the simple // approach for now, but something fancier that splits each string's copy into fixed-size // chunks across threads may be better, since these are min/max strings and almost // certainly not of uniform length. 
copy_string_data<<<num_chunks * 2, 256, 0, stream.value()>>>( string_pool.data(), offsets.data(), intermediate_stats.stripe_stat_chunks.data(), intermediate_stats.stripe_stat_merge.device_ptr()); string_pools.emplace_back(std::move(string_pool)); } } stripe_stat_chunks.emplace_back(std::move(intermediate_stats.stripe_stat_chunks)); stripe_stat_merge.emplace_back(std::move(intermediate_stats.stripe_stat_merge)); stats_dtypes = std::move(intermediate_stats.stats_dtypes); col_types = std::move(intermediate_stats.col_types); num_rows = num_table_rows; } namespace { /** * @brief Gathers stripe information. * * @param columns List of columns * @param rowgroup_bounds Ranges of rows in each rowgroup [rowgroup][column] * @param max_stripe_size Maximum size of each stripe, both in bytes and in rows * @return List of stripe descriptors */ file_segmentation calculate_segmentation(host_span<orc_column_view const> columns, hostdevice_2dvector<rowgroup_rows>&& rowgroup_bounds, stripe_size_limits max_stripe_size) { std::vector<stripe_rowgroups> infos; auto const num_rowgroups = rowgroup_bounds.size().first; size_t stripe_start = 0; size_t stripe_bytes = 0; size_type stripe_rows = 0; for (size_t rg_idx = 0; rg_idx < num_rowgroups; ++rg_idx) { auto const rowgroup_total_bytes = std::accumulate(columns.begin(), columns.end(), 0ul, [&](size_t total_size, auto const& col) { auto const rows = rowgroup_bounds[rg_idx][col.index()].size(); if (col.is_string()) { return total_size + rows + col.rowgroup_char_count(rg_idx); } else { return total_size + col.type_width() * rows; } }); auto const rowgroup_rows_max = std::max_element(rowgroup_bounds[rg_idx].begin(), rowgroup_bounds[rg_idx].end(), [](auto& l, auto& r) { return l.size() < r.size(); }) ->size(); // Check if adding the current rowgroup to the stripe will make the stripe too large or long if ((rg_idx > stripe_start) && (stripe_bytes + rowgroup_total_bytes > max_stripe_size.bytes || stripe_rows + rowgroup_rows_max > max_stripe_size.rows)) { infos.emplace_back(infos.size(), stripe_start, rg_idx - stripe_start); stripe_start = rg_idx; stripe_bytes = 0; stripe_rows = 0; } stripe_bytes += rowgroup_total_bytes; stripe_rows += rowgroup_rows_max; if (rg_idx + 1 == num_rowgroups) { infos.emplace_back(infos.size(), stripe_start, num_rowgroups - stripe_start); } } return {std::move(rowgroup_bounds), std::move(infos)}; } /** * @brief Returns the maximum size of RLE encoded values of an integer type. 
**/ template <typename T> size_t max_varint_size() { // varint encodes 7 bits in each byte return cudf::util::div_rounding_up_unsafe(sizeof(T) * 8, 7); } constexpr size_t RLE_stream_size(TypeKind kind, size_t count) { using cudf::util::div_rounding_up_unsafe; constexpr auto byte_rle_max_len = 128; switch (kind) { case TypeKind::BOOLEAN: return div_rounding_up_unsafe(count, byte_rle_max_len * 8) * (byte_rle_max_len + 1); case TypeKind::BYTE: return div_rounding_up_unsafe(count, byte_rle_max_len) * (byte_rle_max_len + 1); case TypeKind::SHORT: return div_rounding_up_unsafe(count, gpu::encode_block_size) * (gpu::encode_block_size * max_varint_size<int16_t>() + 2); case TypeKind::FLOAT: case TypeKind::INT: case TypeKind::DATE: return div_rounding_up_unsafe(count, gpu::encode_block_size) * (gpu::encode_block_size * max_varint_size<int32_t>() + 2); case TypeKind::LONG: case TypeKind::DOUBLE: return div_rounding_up_unsafe(count, gpu::encode_block_size) * (gpu::encode_block_size * max_varint_size<int64_t>() + 2); default: CUDF_FAIL("Unsupported ORC type for RLE stream size"); } } auto uncomp_block_alignment(CompressionKind compression_kind) { if (compression_kind == NONE or nvcomp::is_compression_disabled(to_nvcomp_compression_type(compression_kind))) { return 1u; } return 1u << nvcomp::compress_input_alignment_bits(to_nvcomp_compression_type(compression_kind)); } auto comp_block_alignment(CompressionKind compression_kind) { if (compression_kind == NONE or nvcomp::is_compression_disabled(to_nvcomp_compression_type(compression_kind))) { return 1u; } return 1u << nvcomp::compress_output_alignment_bits(to_nvcomp_compression_type(compression_kind)); } /** * @brief Builds up per-column streams. * * @param[in,out] columns List of columns * @param[in] segmentation stripe and rowgroup ranges * @param[in] decimal_column_sizes Sizes of encoded decimal columns * @return List of stream descriptors */ orc_streams create_streams(host_span<orc_column_view> columns, file_segmentation const& segmentation, std::map<uint32_t, size_t> const& decimal_column_sizes, bool enable_dictionary, CompressionKind compression_kind, single_write_mode write_mode) { // 'column 0' row index stream std::vector<Stream> streams{{ROW_INDEX, 0}}; // TODO: Separate index and data streams? // First n + 1 streams are row index streams streams.reserve(columns.size() + 1); std::transform(columns.begin(), columns.end(), std::back_inserter(streams), [](auto const& col) { return Stream{ROW_INDEX, col.id()}; }); std::vector<int32_t> ids(columns.size() * gpu::CI_NUM_STREAMS, -1); std::vector<TypeKind> types(streams.size(), INVALID_TYPE_KIND); for (auto& column : columns) { auto const is_nullable = [&]() -> bool { if (write_mode == single_write_mode::YES) { return column.nullable(); } else { // For chunked write, when not provided nullability, we assume the worst case scenario // that all columns are nullable. auto const chunked_nullable = column.user_defined_nullable().value_or(true); CUDF_EXPECTS(chunked_nullable or column.null_count() == 0, "Mismatch in metadata prescribed nullability and input column. 
" "Metadata for input column with nulls cannot prescribe nullability = false"); return chunked_nullable; } }(); auto RLE_column_size = [&](TypeKind type_kind) { return std::accumulate( thrust::make_counting_iterator(0ul), thrust::make_counting_iterator(segmentation.num_rowgroups()), 0ul, [&](auto data_size, auto rg_idx) { return data_size + RLE_stream_size(type_kind, segmentation.rowgroups[rg_idx][column.index()].size()); }); }; auto const kind = column.orc_kind(); auto add_stream = [&](gpu::StreamIndexType index_type, StreamKind kind, TypeKind type_kind, size_t size) { auto const max_alignment_padding = uncomp_block_alignment(compression_kind) - 1; const auto base = column.index() * gpu::CI_NUM_STREAMS; ids[base + index_type] = streams.size(); streams.push_back(orc::Stream{ kind, column.id(), (size == 0) ? 0 : size + max_alignment_padding * segmentation.num_rowgroups()}); types.push_back(type_kind); }; auto add_RLE_stream = [&]( gpu::StreamIndexType index_type, StreamKind kind, TypeKind type_kind) { add_stream(index_type, kind, type_kind, RLE_column_size(type_kind)); }; if (is_nullable) { add_RLE_stream(gpu::CI_PRESENT, PRESENT, TypeKind::BOOLEAN); } switch (kind) { case TypeKind::BOOLEAN: case TypeKind::BYTE: add_RLE_stream(gpu::CI_DATA, DATA, kind); column.set_orc_encoding(DIRECT); break; case TypeKind::SHORT: case TypeKind::INT: case TypeKind::LONG: case TypeKind::DATE: add_RLE_stream(gpu::CI_DATA, DATA, kind); column.set_orc_encoding(DIRECT_V2); break; case TypeKind::FLOAT: case TypeKind::DOUBLE: // Pass through if no nulls (no RLE encoding for floating point) add_stream( gpu::CI_DATA, DATA, kind, (column.null_count() != 0) ? RLE_column_size(kind) : 0); column.set_orc_encoding(DIRECT); break; case TypeKind::STRING: { bool enable_dict = enable_dictionary; size_t dict_data_size = 0; size_t dict_strings = 0; size_t dict_lengths_div512 = 0; for (auto const& stripe : segmentation.stripes) { auto const sd = column.host_stripe_dict(stripe.id); enable_dict = (enable_dict && sd.is_enabled); if (enable_dict) { dict_strings += sd.entry_count; dict_lengths_div512 += (sd.entry_count + 0x1ff) >> 9; dict_data_size += sd.char_count; } } size_t const direct_data_size = column.char_count(); if (enable_dict) { uint32_t dict_bits = 0; for (dict_bits = 1; dict_bits < 32; dict_bits <<= 1) { if (dict_strings <= (1ull << dict_bits)) break; } auto const valid_count = column.size() - column.null_count(); dict_data_size += (dict_bits * valid_count + 7) >> 3; } // Decide between direct or dictionary encoding if (enable_dict && dict_data_size < direct_data_size) { add_RLE_stream(gpu::CI_DATA, DATA, TypeKind::INT); add_stream(gpu::CI_DATA2, LENGTH, TypeKind::INT, dict_lengths_div512 * (512 * 4 + 2)); add_stream( gpu::CI_DICTIONARY, DICTIONARY_DATA, TypeKind::CHAR, std::max(dict_data_size, 1ul)); column.set_orc_encoding(DICTIONARY_V2); } else { add_stream(gpu::CI_DATA, DATA, TypeKind::CHAR, std::max<size_t>(direct_data_size, 1)); add_RLE_stream(gpu::CI_DATA2, LENGTH, TypeKind::INT); column.set_orc_encoding(DIRECT_V2); } break; } case TypeKind::TIMESTAMP: add_RLE_stream(gpu::CI_DATA, DATA, TypeKind::LONG); add_RLE_stream(gpu::CI_DATA2, SECONDARY, TypeKind::LONG); column.set_orc_encoding(DIRECT_V2); break; case TypeKind::DECIMAL: // varint values (NO RLE) // data_stream_size = decimal_column_sizes.at(column.index()); add_stream(gpu::CI_DATA, DATA, TypeKind::DECIMAL, decimal_column_sizes.at(column.index())); // scale stream TODO: compute exact size since all elems are equal add_RLE_stream(gpu::CI_DATA2, SECONDARY, 
TypeKind::INT); column.set_orc_encoding(DIRECT_V2); break; case TypeKind::LIST: case TypeKind::MAP: // no data stream, only lengths add_RLE_stream(gpu::CI_DATA2, LENGTH, TypeKind::INT); column.set_orc_encoding(DIRECT_V2); break; case TypeKind::STRUCT: // Only has the present stream break; default: CUDF_FAIL("Unsupported ORC type kind"); } } return {std::move(streams), std::move(ids), std::move(types)}; } std::vector<std::vector<rowgroup_rows>> calculate_aligned_rowgroup_bounds( orc_table_view const& orc_table, file_segmentation const& segmentation, rmm::cuda_stream_view stream) { if (segmentation.num_rowgroups() == 0) return {}; auto d_pd_set_counts_data = rmm::device_uvector<cudf::size_type>( orc_table.num_columns() * segmentation.num_rowgroups(), stream); auto const d_pd_set_counts = device_2dspan<cudf::size_type>{ d_pd_set_counts_data.data(), segmentation.num_rowgroups(), orc_table.num_columns()}; gpu::reduce_pushdown_masks(orc_table.d_columns, segmentation.rowgroups, d_pd_set_counts, stream); auto aligned_rgs = hostdevice_2dvector<rowgroup_rows>( segmentation.num_rowgroups(), orc_table.num_columns(), stream); CUDF_CUDA_TRY(cudaMemcpyAsync(aligned_rgs.base_device_ptr(), segmentation.rowgroups.base_device_ptr(), aligned_rgs.count() * sizeof(rowgroup_rows), cudaMemcpyDefault, stream.value())); auto const d_stripes = cudf::detail::make_device_uvector_async( segmentation.stripes, stream, rmm::mr::get_current_device_resource()); // One thread per column, per stripe thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator(0), orc_table.num_columns() * segmentation.num_stripes(), [columns = device_span<orc_column_device_view const>{orc_table.d_columns}, stripes = device_span<stripe_rowgroups const>{d_stripes}, d_pd_set_counts, out_rowgroups = device_2dspan<rowgroup_rows>{aligned_rgs}] __device__(auto& idx) { uint32_t const col_idx = idx / stripes.size(); // No alignment needed for root columns if (not columns[col_idx].parent_index.has_value()) return; auto const stripe_idx = idx % stripes.size(); auto const stripe = stripes[stripe_idx]; auto const parent_col_idx = columns[col_idx].parent_index.value(); auto const parent_column = columns[parent_col_idx]; auto const stripe_end = stripe.first + stripe.size; auto seek_last_borrow_rg = [&](auto rg_idx, size_type& bits_to_borrow) { auto curr = rg_idx + 1; auto curr_rg_size = [&]() { return parent_column.pushdown_mask != nullptr ? 
d_pd_set_counts[curr][parent_col_idx] : out_rowgroups[curr][col_idx].size(); }; while (curr < stripe_end and curr_rg_size() <= bits_to_borrow) { // All bits from rowgroup borrowed, make the rowgroup empty out_rowgroups[curr][col_idx].begin = out_rowgroups[curr][col_idx].end; bits_to_borrow -= curr_rg_size(); ++curr; } return curr; }; int previously_borrowed = 0; for (auto rg_idx = stripe.first; rg_idx + 1 < stripe_end; ++rg_idx) { auto& rg = out_rowgroups[rg_idx][col_idx]; if (parent_column.pushdown_mask == nullptr) { // No pushdown mask, all null mask bits will be encoded // Align on rowgroup size (can be misaligned for list children) if (rg.size() % 8) { auto bits_to_borrow = 8 - rg.size() % 8; auto const last_borrow_rg_idx = seek_last_borrow_rg(rg_idx, bits_to_borrow); if (last_borrow_rg_idx == stripe_end) { // Didn't find enough bits to borrow, move the rowgroup end to the stripe end rg.end = out_rowgroups[stripe_end - 1][col_idx].end; // Done with this stripe break; } auto& last_borrow_rg = out_rowgroups[last_borrow_rg_idx][col_idx]; last_borrow_rg.begin += bits_to_borrow; rg.end = last_borrow_rg.begin; // Skip the rowgroups we emptied in the loop rg_idx = last_borrow_rg_idx - 1; } } else { // pushdown mask present; null mask bits w/ set pushdown mask bits will be encoded // Use the number of set bits in pushdown mask as size auto bits_to_borrow = 8 - (d_pd_set_counts[rg_idx][parent_col_idx] - previously_borrowed) % 8; if (bits_to_borrow == 0) { // Didn't borrow any bits for this rowgroup previously_borrowed = 0; continue; } // Find rowgroup in which we finish the search for missing bits auto const last_borrow_rg_idx = seek_last_borrow_rg(rg_idx, bits_to_borrow); if (last_borrow_rg_idx == stripe_end) { // Didn't find enough bits to borrow, move the rowgroup end to the stripe end rg.end = out_rowgroups[stripe_end - 1][col_idx].end; // Done with this stripe break; } auto& last_borrow_rg = out_rowgroups[last_borrow_rg_idx][col_idx]; // First row that does not need to be borrowed auto borrow_end = last_borrow_rg.begin; // Adjust the number of bits to borrow in the next iteration previously_borrowed = bits_to_borrow; // Find word in which we finish the search for missing bits (guaranteed to be available) while (bits_to_borrow != 0) { auto const mask = cudf::detail::get_mask_offset_word( parent_column.pushdown_mask, 0, borrow_end, borrow_end + 32); auto const valid_in_word = __popc(mask); if (valid_in_word > bits_to_borrow) break; bits_to_borrow -= valid_in_word; borrow_end += 32; } // Find the last of the missing bits (guaranteed to be available) while (bits_to_borrow != 0) { if (bit_is_set(parent_column.pushdown_mask, borrow_end)) { --bits_to_borrow; }; ++borrow_end; } last_borrow_rg.begin = borrow_end; rg.end = borrow_end; // Skip the rowgroups we emptied in the loop rg_idx = last_borrow_rg_idx - 1; } } }); aligned_rgs.device_to_host_sync(stream); std::vector<std::vector<rowgroup_rows>> h_aligned_rgs; h_aligned_rgs.reserve(segmentation.num_rowgroups()); std::transform(thrust::make_counting_iterator(0ul), thrust::make_counting_iterator(segmentation.num_rowgroups()), std::back_inserter(h_aligned_rgs), [&](auto idx) -> std::vector<rowgroup_rows> { return {aligned_rgs[idx].begin(), aligned_rgs[idx].end()}; }); return h_aligned_rgs; } struct segmented_valid_cnt_input { bitmask_type const* mask; std::vector<size_type> indices; }; encoded_data encode_columns(orc_table_view const& orc_table, encoder_decimal_info&& dec_chunk_sizes, file_segmentation const& segmentation, orc_streams const& 
streams, uint32_t uncomp_block_align, rmm::cuda_stream_view stream) { auto const num_columns = orc_table.num_columns(); hostdevice_2dvector<gpu::EncChunk> chunks(num_columns, segmentation.num_rowgroups(), stream); auto const aligned_rowgroups = calculate_aligned_rowgroup_bounds(orc_table, segmentation, stream); // Initialize column chunks' descriptions std::map<size_type, segmented_valid_cnt_input> validity_check_inputs; for (auto const& column : orc_table.columns) { for (auto const& stripe : segmentation.stripes) { for (auto rg_idx_it = stripe.cbegin(); rg_idx_it < stripe.cend(); ++rg_idx_it) { auto const rg_idx = *rg_idx_it; auto& ck = chunks[column.index()][rg_idx]; ck.start_row = segmentation.rowgroups[rg_idx][column.index()].begin; ck.num_rows = segmentation.rowgroups[rg_idx][column.index()].size(); ck.null_mask_start_row = aligned_rowgroups[rg_idx][column.index()].begin; ck.null_mask_num_rows = aligned_rowgroups[rg_idx][column.index()].size(); ck.encoding_kind = column.orc_encoding(); ck.type_kind = column.orc_kind(); auto const is_str_dict = ck.type_kind == TypeKind::STRING and ck.encoding_kind == DICTIONARY_V2; ck.dict_index = is_str_dict ? column.host_stripe_dict(stripe.id).index.data() : nullptr; ck.dict_data_order = is_str_dict ? column.host_stripe_dict(stripe.id).data_order.data() : nullptr; ck.dtype_len = (ck.type_kind == TypeKind::STRING) ? 1 : column.type_width(); ck.scale = column.scale(); ck.decimal_offsets = (ck.type_kind == TypeKind::DECIMAL) ? column.decimal_offsets() : nullptr; } } } chunks.host_to_device_async(stream); // TODO (future): pass columns separately from chunks (to skip this step) // and remove info from chunks that is common for the entire column thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator(0ul), chunks.count(), [chunks = device_2dspan<gpu::EncChunk>{chunks}, cols = device_span<orc_column_device_view const>{orc_table.d_columns}] __device__(auto& idx) { auto const col_idx = idx / chunks.size().second; auto const rg_idx = idx % chunks.size().second; chunks[col_idx][rg_idx].column = &cols[col_idx]; }); auto validity_check_indices = [&](size_t col_idx) { std::vector<size_type> indices; for (auto const& stripe : segmentation.stripes) { for (auto rg_idx_it = stripe.cbegin(); rg_idx_it < stripe.cend() - 1; ++rg_idx_it) { auto const& chunk = chunks[col_idx][*rg_idx_it]; indices.push_back(chunk.start_row); indices.push_back(chunk.start_row + chunk.num_rows); } } return indices; }; for (auto const& column : orc_table.columns) { if (column.orc_kind() == TypeKind::BOOLEAN && column.nullable()) { validity_check_inputs[column.index()] = {column.null_mask(), validity_check_indices(column.index())}; } } for (auto& cnt_in : validity_check_inputs) { auto const valid_counts = cudf::detail::segmented_valid_count(cnt_in.second.mask, cnt_in.second.indices, stream); CUDF_EXPECTS( std::none_of(valid_counts.cbegin(), valid_counts.cend(), [](auto valid_count) { return valid_count % 8; }), "There's currently a bug in encoding boolean columns. Suggested workaround is to convert " "to int8 type." 
" Please see https://github.com/rapidsai/cudf/issues/6763 for more information."); } hostdevice_2dvector<gpu::encoder_chunk_streams> chunk_streams( num_columns, segmentation.num_rowgroups(), stream); // per-stripe, per-stream owning buffers std::vector<std::vector<rmm::device_uvector<uint8_t>>> encoded_data(segmentation.num_stripes()); for (auto const& stripe : segmentation.stripes) { std::generate_n(std::back_inserter(encoded_data[stripe.id]), streams.size(), [stream]() { return rmm::device_uvector<uint8_t>(0, stream); }); for (size_t col_idx = 0; col_idx < num_columns; col_idx++) { for (int strm_type = 0; strm_type < gpu::CI_NUM_STREAMS; ++strm_type) { auto const& column = orc_table.column(col_idx); auto col_streams = chunk_streams[col_idx]; auto const strm_id = streams.id(col_idx * gpu::CI_NUM_STREAMS + strm_type); std::for_each(stripe.cbegin(), stripe.cend(), [&](auto rg_idx) { col_streams[rg_idx].ids[strm_type] = strm_id; col_streams[rg_idx].lengths[strm_type] = 0; }); // Calculate rowgroup sizes and stripe size if (strm_id >= 0) { size_t stripe_size = 0; std::for_each(stripe.cbegin(), stripe.cend(), [&](auto rg_idx) { auto const& ck = chunks[col_idx][rg_idx]; auto& strm = col_streams[rg_idx]; if ((strm_type == gpu::CI_DICTIONARY) || (strm_type == gpu::CI_DATA2 && ck.encoding_kind == DICTIONARY_V2)) { if (rg_idx == *stripe.cbegin()) { auto const stripe_dict = column.host_stripe_dict(stripe.id); strm.lengths[strm_type] = (strm_type == gpu::CI_DICTIONARY) ? stripe_dict.char_count : (((stripe_dict.entry_count + 0x1ff) >> 9) * (512 * 4 + 2)); } else { strm.lengths[strm_type] = 0; } } else if (strm_type == gpu::CI_DATA && ck.type_kind == TypeKind::STRING && ck.encoding_kind == DIRECT_V2) { strm.lengths[strm_type] = std::max(column.rowgroup_char_count(rg_idx), 1); } else if (strm_type == gpu::CI_DATA && streams[strm_id].length == 0 && (ck.type_kind == DOUBLE || ck.type_kind == FLOAT)) { // Pass-through strm.lengths[strm_type] = ck.num_rows * ck.dtype_len; } else if (ck.type_kind == DECIMAL && strm_type == gpu::CI_DATA) { strm.lengths[strm_type] = dec_chunk_sizes.rg_sizes.at(col_idx)[rg_idx]; } else { strm.lengths[strm_type] = RLE_stream_size(streams.type(strm_id), ck.num_rows); } // Allow extra space for alignment stripe_size += strm.lengths[strm_type] + uncomp_block_align - 1; }); encoded_data[stripe.id][strm_id] = rmm::device_uvector<uint8_t>(stripe_size, stream); } // Set offsets for (auto rg_idx_it = stripe.cbegin(); rg_idx_it < stripe.cend(); ++rg_idx_it) { auto const rg_idx = *rg_idx_it; auto const& ck = chunks[col_idx][rg_idx]; auto& strm = col_streams[rg_idx]; if (strm_id < 0 or (strm_type == gpu::CI_DATA && streams[strm_id].length == 0 && (ck.type_kind == DOUBLE || ck.type_kind == FLOAT))) { strm.data_ptrs[strm_type] = nullptr; } else { if ((strm_type == gpu::CI_DICTIONARY) || (strm_type == gpu::CI_DATA2 && ck.encoding_kind == DICTIONARY_V2)) { strm.data_ptrs[strm_type] = encoded_data[stripe.id][strm_id].data(); } else { strm.data_ptrs[strm_type] = (rg_idx_it == stripe.cbegin()) ? 
encoded_data[stripe.id][strm_id].data()
                : (col_streams[rg_idx - 1].data_ptrs[strm_type] +
                   col_streams[rg_idx - 1].lengths[strm_type]);
          }
        }
        auto const misalignment =
          reinterpret_cast<intptr_t>(strm.data_ptrs[strm_type]) % uncomp_block_align;
        if (misalignment != 0) { strm.data_ptrs[strm_type] += (uncomp_block_align - misalignment); }
      }
    }
  }
  chunk_streams.host_to_device_async(stream);

  if (orc_table.num_rows() > 0) {
    if (orc_table.num_string_columns() != 0) {
      auto d_stripe_dict = orc_table.string_column(0).device_stripe_dicts();
      gpu::EncodeStripeDictionaries(d_stripe_dict.data(),
                                    orc_table.d_columns,
                                    chunks,
                                    orc_table.num_string_columns(),
                                    segmentation.num_stripes(),
                                    chunk_streams,
                                    stream);
    }
    gpu::EncodeOrcColumnData(chunks, chunk_streams, stream);
  }
  chunk_streams.device_to_host_sync(stream);

  return {std::move(encoded_data), std::move(chunk_streams)};
}

// TODO: remove StripeInformation from this function and return strm_desc instead
/**
 * @brief Returns stripe information after compacting columns' individual data
 * chunks into contiguous data streams.
 *
 * @param[in] num_index_streams Total number of index streams
 * @param[in] segmentation stripe and rowgroup ranges
 * @param[in,out] enc_data ORC per-chunk streams of encoded data
 * @param[in,out] strm_desc List of stream descriptors [stripe][data_stream]
 * @param[in] stream CUDA stream used for device memory operations and kernel launches
 * @return The stripes' information
 */
std::vector<StripeInformation> gather_stripes(size_t num_index_streams,
                                              file_segmentation const& segmentation,
                                              encoded_data* enc_data,
                                              hostdevice_2dvector<gpu::StripeStream>* strm_desc,
                                              rmm::cuda_stream_view stream)
{
  if (segmentation.num_stripes() == 0) { return {}; }

  // gathered stripes - per-stripe, per-stream (same as encoded_data.data)
  std::vector<std::vector<rmm::device_uvector<uint8_t>>> gathered_stripes(enc_data->data.size());
  for (auto& stripe_data : gathered_stripes) {
    std::generate_n(std::back_inserter(stripe_data), enc_data->data[0].size(), [&]() {
      return rmm::device_uvector<uint8_t>(0, stream);
    });
  }

  std::vector<StripeInformation> stripes(segmentation.num_stripes());
  for (auto const& stripe : segmentation.stripes) {
    for (size_t col_idx = 0; col_idx < enc_data->streams.size().first; col_idx++) {
      auto const& col_streams = (enc_data->streams)[col_idx];
      // Assign stream data of column data stream(s)
      for (int k = 0; k < gpu::CI_INDEX; k++) {
        auto const stream_id = col_streams[0].ids[k];
        if (stream_id != -1) {
          auto const actual_stripe_size = std::accumulate(
            col_streams.begin() + stripe.first,
            col_streams.begin() + stripe.first + stripe.size,
            0ul,
            [&](auto const& sum, auto const& strm) { return sum + strm.lengths[k]; });

          auto const& allocated_stripe_size = enc_data->data[stripe.id][stream_id].size();
          CUDF_EXPECTS(allocated_stripe_size >= actual_stripe_size,
                       "Internal ORC writer error: insufficient allocation size for encoded data");
          // Allocate buffers of exactly the encoded data size; these are smaller than the
          // original buffers. Don't copy the data into an exactly sized buffer when only one
          // chunk is present, to avoid the performance overhead of the additional copy. When
          // there are multiple chunks, they are copied anyway to make them contiguous (i.e. to
          // gather them).
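          // Illustration (sizes assumed for the example, not taken from a real run): with
          // stripe.size == 4 and chunk lengths {100, 80, 120, 60}, actual_stripe_size is
          // 360 bytes, while the buffer allocated in encode_columns is larger since it also
          // reserves per-chunk alignment padding; the gathered buffer below is exactly 360.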
if (stripe.size > 1 and allocated_stripe_size > actual_stripe_size) { gathered_stripes[stripe.id][stream_id] = rmm::device_uvector<uint8_t>(actual_stripe_size, stream); } auto* ss = &(*strm_desc)[stripe.id][stream_id - num_index_streams]; ss->data_ptr = gathered_stripes[stripe.id][stream_id].data(); ss->stream_size = actual_stripe_size; ss->first_chunk_id = stripe.first; ss->num_chunks = stripe.size; ss->column_id = col_idx; ss->stream_type = k; } } } stripes[stripe.id].numberOfRows = stripe.size == 0 ? 0 : segmentation.rowgroups[stripe.first + stripe.size - 1][0].end - segmentation.rowgroups[stripe.first][0].begin; } strm_desc->host_to_device_async(stream); // TODO: use cub::DeviceMemcpy::Batched gpu::CompactOrcDataStreams(*strm_desc, enc_data->streams, stream); strm_desc->device_to_host_async(stream); enc_data->streams.device_to_host_sync(stream); // move the gathered stripes to encoded_data.data for lifetime management for (auto stripe_id = 0ul; stripe_id < enc_data->data.size(); ++stripe_id) { for (auto stream_id = 0ul; stream_id < enc_data->data[0].size(); ++stream_id) { if (not gathered_stripes[stripe_id][stream_id].is_empty()) enc_data->data[stripe_id][stream_id] = std::move(gathered_stripes[stripe_id][stream_id]); } } return stripes; } void set_stat_desc_leaf_cols(device_span<orc_column_device_view const> columns, device_span<stats_column_desc> stat_desc, rmm::cuda_stream_view stream) { thrust::for_each(rmm::exec_policy(stream), thrust::make_counting_iterator(0ul), thrust::make_counting_iterator(stat_desc.size()), [=] __device__(auto idx) { stat_desc[idx].leaf_column = &columns[idx]; }); } cudf::detail::hostdevice_vector<uint8_t> allocate_and_encode_blobs( cudf::detail::hostdevice_vector<statistics_merge_group>& stats_merge_groups, rmm::device_uvector<statistics_chunk>& stat_chunks, int num_stat_blobs, rmm::cuda_stream_view stream) { // figure out the buffer size needed for protobuf format gpu::orc_init_statistics_buffersize( stats_merge_groups.device_ptr(), stat_chunks.data(), num_stat_blobs, stream); auto max_blobs = stats_merge_groups.element(num_stat_blobs - 1, stream); cudf::detail::hostdevice_vector<uint8_t> blobs(max_blobs.start_chunk + max_blobs.num_chunks, stream); gpu::orc_encode_statistics(blobs.device_ptr(), stats_merge_groups.device_ptr(), stat_chunks.data(), num_stat_blobs, stream); stats_merge_groups.device_to_host_async(stream); blobs.device_to_host_sync(stream); return blobs; } /** * @brief Returns column statistics in an intermediate format. 
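 *
 * The returned object owns the per-stripe statistics chunks so they can outlive the
 * input table across chunked write calls; rowgroup-level blobs are encoded here only
 * when requested, since they go into each stripe's row index rather than the footer.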
* * @param statistics_freq Frequency of statistics to be included in the output file * @param orc_table Table information to be written * @param segmentation stripe and rowgroup ranges * @param stream CUDA stream used for device memory operations and kernel launches * @return The statistic information */ intermediate_statistics gather_statistic_blobs(statistics_freq const stats_freq, orc_table_view const& orc_table, file_segmentation const& segmentation, rmm::cuda_stream_view stream) { auto const num_rowgroup_blobs = segmentation.rowgroups.count(); auto const num_stripe_blobs = segmentation.num_stripes() * orc_table.num_columns(); auto const are_statistics_enabled = stats_freq != statistics_freq::STATISTICS_NONE; if (not are_statistics_enabled or num_rowgroup_blobs + num_stripe_blobs == 0) { return intermediate_statistics{stream}; } cudf::detail::hostdevice_vector<stats_column_desc> stat_desc(orc_table.num_columns(), stream); cudf::detail::hostdevice_vector<statistics_merge_group> rowgroup_merge(num_rowgroup_blobs, stream); cudf::detail::hostdevice_vector<statistics_merge_group> stripe_merge(num_stripe_blobs, stream); std::vector<statistics_dtype> col_stats_dtypes; std::vector<data_type> col_types; auto rowgroup_stat_merge = rowgroup_merge.host_ptr(); auto stripe_stat_merge = stripe_merge.host_ptr(); for (auto const& column : orc_table.columns) { stats_column_desc* desc = &stat_desc[column.index()]; switch (column.orc_kind()) { case TypeKind::BYTE: desc->stats_dtype = dtype_int8; break; case TypeKind::SHORT: desc->stats_dtype = dtype_int16; break; case TypeKind::INT: desc->stats_dtype = dtype_int32; break; case TypeKind::LONG: desc->stats_dtype = dtype_int64; break; case TypeKind::FLOAT: desc->stats_dtype = dtype_float32; break; case TypeKind::DOUBLE: desc->stats_dtype = dtype_float64; break; case TypeKind::BOOLEAN: desc->stats_dtype = dtype_bool; break; case TypeKind::DATE: desc->stats_dtype = dtype_int32; break; case TypeKind::DECIMAL: desc->stats_dtype = dtype_decimal64; break; case TypeKind::TIMESTAMP: desc->stats_dtype = dtype_timestamp64; break; case TypeKind::STRING: desc->stats_dtype = dtype_string; break; default: desc->stats_dtype = dtype_none; break; } desc->num_rows = column.size(); desc->num_values = column.size(); if (desc->stats_dtype == dtype_timestamp64) { // Timestamp statistics are in milliseconds switch (column.scale()) { case 9: desc->ts_scale = 1000; break; case 6: desc->ts_scale = 0; break; case 3: desc->ts_scale = -1000; break; case 0: desc->ts_scale = -1000000; break; default: desc->ts_scale = 0; break; } } else { desc->ts_scale = 0; } col_stats_dtypes.push_back(desc->stats_dtype); col_types.push_back(column.type()); for (auto const& stripe : segmentation.stripes) { auto& grp = stripe_stat_merge[column.index() * segmentation.num_stripes() + stripe.id]; grp.col_dtype = column.type(); grp.stats_dtype = desc->stats_dtype; grp.start_chunk = static_cast<uint32_t>(column.index() * segmentation.num_rowgroups() + stripe.first); grp.num_chunks = stripe.size; for (auto rg_idx_it = stripe.cbegin(); rg_idx_it != stripe.cend(); ++rg_idx_it) { auto& rg_grp = rowgroup_stat_merge[column.index() * segmentation.num_rowgroups() + *rg_idx_it]; rg_grp.col_dtype = column.type(); rg_grp.stats_dtype = desc->stats_dtype; rg_grp.start_chunk = *rg_idx_it; rg_grp.num_chunks = 1; } } } stat_desc.host_to_device_async(stream); rowgroup_merge.host_to_device_async(stream); stripe_merge.host_to_device_async(stream); set_stat_desc_leaf_cols(orc_table.d_columns, stat_desc, stream); // The rowgroup 
stat chunks are written out in each stripe. The stripe and file-level chunks are // written in the footer. To prevent persisting the rowgroup stat chunks across multiple write // calls in a chunked write situation, these allocations are split up so stripe data can persist // until the footer is written and rowgroup data can be freed after being written to the stripe. rmm::device_uvector<statistics_chunk> rowgroup_chunks(num_rowgroup_blobs, stream); rmm::device_uvector<statistics_chunk> stripe_chunks(num_stripe_blobs, stream); auto rowgroup_stat_chunks = rowgroup_chunks.data(); auto stripe_stat_chunks = stripe_chunks.data(); rmm::device_uvector<statistics_group> rowgroup_groups(num_rowgroup_blobs, stream); gpu::orc_init_statistics_groups( rowgroup_groups.data(), stat_desc.device_ptr(), segmentation.rowgroups, stream); detail::calculate_group_statistics<detail::io_file_format::ORC>( rowgroup_chunks.data(), rowgroup_groups.data(), num_rowgroup_blobs, stream); detail::merge_group_statistics<detail::io_file_format::ORC>( stripe_stat_chunks, rowgroup_stat_chunks, stripe_merge.device_ptr(), num_stripe_blobs, stream); // With chunked writes, the orc table can be deallocated between write calls. // This forces our hand to encode row groups and stripes only in this stage and further // we have to persist any data from the table that we need later. The // minimum and maximum string inside the `str_val` structure inside `statistics_val` in // `statistic_chunk` that are copies of the largest and smallest strings in the row group, // or stripe need to be persisted between write calls. We write rowgroup data with each // stripe and then save each stripe's stats until the end where we merge those all together // to get the file-level stats. // Skip rowgroup blobs when encoding, if chosen granularity is coarser than "ROW_GROUP". auto const is_granularity_rowgroup = stats_freq == ORC_STATISTICS_ROW_GROUP; // we have to encode the row groups now IF they are being written out auto rowgroup_blobs = [&]() -> std::vector<ColStatsBlob> { if (not is_granularity_rowgroup) { return {}; } cudf::detail::hostdevice_vector<uint8_t> blobs = allocate_and_encode_blobs(rowgroup_merge, rowgroup_chunks, num_rowgroup_blobs, stream); std::vector<ColStatsBlob> rowgroup_blobs(num_rowgroup_blobs); for (size_t i = 0; i < num_rowgroup_blobs; i++) { auto const stat_begin = blobs.host_ptr(rowgroup_merge[i].start_chunk); auto const stat_end = stat_begin + rowgroup_merge[i].num_chunks; rowgroup_blobs[i].assign(stat_begin, stat_end); } return rowgroup_blobs; }(); return {std::move(rowgroup_blobs), std::move(stripe_chunks), std::move(stripe_merge), std::move(col_stats_dtypes), std::move(col_types)}; } /** * @brief Returns column statistics encoded in ORC protobuf format stored in the footer. 
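 *
 * Concatenates the stripe-level statistics persisted from all previous write calls,
 * derives the file-level statistics from them, and then encodes both sets of blobs.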
 *
 * @param num_stripes Number of stripes in the data
 * @param per_chunk_stats Statistics persisted across write calls, as returned from
 * `gather_statistic_blobs`
 * @param stream CUDA stream used for device memory operations and kernel launches
 * @return The encoded statistic blobs
 */
encoded_footer_statistics finish_statistic_blobs(int num_stripes,
                                                 persisted_statistics& per_chunk_stats,
                                                 rmm::cuda_stream_view stream)
{
  auto stripe_size_iter = thrust::make_transform_iterator(
    per_chunk_stats.stripe_stat_merge.begin(), [](auto const& i) { return i.size(); });

  auto const num_columns = per_chunk_stats.col_types.size();
  auto const num_stripe_blobs =
    thrust::reduce(stripe_size_iter, stripe_size_iter + per_chunk_stats.stripe_stat_merge.size());
  auto const num_file_blobs = num_columns;
  auto const num_blobs      = static_cast<int>(num_stripe_blobs + num_file_blobs);

  if (num_stripe_blobs == 0) { return {}; }

  // merge the stripe persisted data and add file data
  rmm::device_uvector<statistics_chunk> stat_chunks(num_blobs, stream);
  cudf::detail::hostdevice_vector<statistics_merge_group> stats_merge(num_blobs, stream);

  // we need to merge the stat arrays from the persisted data.
  // this needs to be done carefully because each array can contain
  // a different number of stripes and stripes from each column must be
  // located next to each other. We know the total number of stripes and
  // we know the size of each array. The number of stripes per column in a chunk array can
  // be calculated by dividing the number of chunks by the number of columns.
  // That many chunks need to be copied at a time to the proper destination.
  size_t num_entries_seen = 0;
  for (size_t i = 0; i < per_chunk_stats.stripe_stat_chunks.size(); ++i) {
    auto const stripes_per_col = per_chunk_stats.stripe_stat_chunks[i].size() / num_columns;

    auto const chunk_bytes = stripes_per_col * sizeof(statistics_chunk);
    auto const merge_bytes = stripes_per_col * sizeof(statistics_merge_group);
    for (size_t col = 0; col < num_columns; ++col) {
      CUDF_CUDA_TRY(
        cudaMemcpyAsync(stat_chunks.data() + (num_stripes * col) + num_entries_seen,
                        per_chunk_stats.stripe_stat_chunks[i].data() + col * stripes_per_col,
                        chunk_bytes,
                        cudaMemcpyDefault,
                        stream.value()));
      CUDF_CUDA_TRY(
        cudaMemcpyAsync(stats_merge.device_ptr() + (num_stripes * col) + num_entries_seen,
                        per_chunk_stats.stripe_stat_merge[i].device_ptr() + col * stripes_per_col,
                        merge_bytes,
                        cudaMemcpyDefault,
                        stream.value()));
    }
    num_entries_seen += stripes_per_col;
  }

  std::vector<statistics_merge_group> file_stats_merge(num_file_blobs);
  for (auto i = 0u; i < num_file_blobs; ++i) {
    auto col_stats         = &file_stats_merge[i];
    col_stats->col_dtype   = per_chunk_stats.col_types[i];
    col_stats->stats_dtype = per_chunk_stats.stats_dtypes[i];
    col_stats->start_chunk = static_cast<uint32_t>(i * num_stripes);
    col_stats->num_chunks  = static_cast<uint32_t>(num_stripes);
  }

  auto d_file_stats_merge = stats_merge.device_ptr(num_stripe_blobs);
  CUDF_CUDA_TRY(cudaMemcpyAsync(d_file_stats_merge,
                                file_stats_merge.data(),
                                num_file_blobs * sizeof(statistics_merge_group),
                                cudaMemcpyDefault,
                                stream.value()));

  auto file_stat_chunks = stat_chunks.data() + num_stripe_blobs;
  detail::merge_group_statistics<detail::io_file_format::ORC>(
    file_stat_chunks, stat_chunks.data(), d_file_stats_merge, num_file_blobs, stream);

  cudf::detail::hostdevice_vector<uint8_t> blobs =
    allocate_and_encode_blobs(stats_merge, stat_chunks, num_blobs, stream);

  auto stripe_stat_merge = stats_merge.host_ptr();

  std::vector<ColStatsBlob> stripe_blobs(num_stripe_blobs);
  for (size_t i = 0; i <
num_stripe_blobs; i++) { auto const stat_begin = blobs.host_ptr(stripe_stat_merge[i].start_chunk); auto const stat_end = stat_begin + stripe_stat_merge[i].num_chunks; stripe_blobs[i].assign(stat_begin, stat_end); } std::vector<ColStatsBlob> file_blobs(num_file_blobs); auto file_stat_merge = stats_merge.host_ptr(num_stripe_blobs); for (auto i = 0u; i < num_file_blobs; i++) { auto const stat_begin = blobs.host_ptr(file_stat_merge[i].start_chunk); auto const stat_end = stat_begin + file_stat_merge[i].num_chunks; file_blobs[i].assign(stat_begin, stat_end); } return {std::move(stripe_blobs), std::move(file_blobs)}; } /** * @brief Writes the specified column's row index stream. * * @param[in] stripe_id Stripe's identifier * @param[in] stream_id Stream identifier (column id + 1) * @param[in] columns List of columns * @param[in] segmentation stripe and rowgroup ranges * @param[in] enc_streams List of encoder chunk streams [column][rowgroup] * @param[in] strm_desc List of stream descriptors * @param[in] comp_res Output status for compressed streams * @param[in] rg_stats row group level statistics * @param[in,out] stripe Stream's parent stripe * @param[in,out] streams List of all streams * @param[in] compression_kind The compression kind * @param[in] compression_blocksize The block size used for compression * @param[in] out_sink Sink for writing data */ void write_index_stream(int32_t stripe_id, int32_t stream_id, host_span<orc_column_view const> columns, file_segmentation const& segmentation, host_2dspan<gpu::encoder_chunk_streams const> enc_streams, host_2dspan<gpu::StripeStream const> strm_desc, host_span<compression_result const> comp_res, host_span<ColStatsBlob const> rg_stats, StripeInformation* stripe, orc_streams* streams, CompressionKind compression_kind, size_t compression_blocksize, std::unique_ptr<data_sink> const& out_sink) { row_group_index_info present; row_group_index_info data; row_group_index_info data2; auto const column_id = stream_id - 1; auto find_record = [=, &strm_desc](gpu::encoder_chunk_streams const& stream, gpu::StreamIndexType type) { row_group_index_info record; if (stream.ids[type] > 0) { record.pos = 0; if (compression_kind != NONE) { auto const& ss = strm_desc[stripe_id][stream.ids[type] - (columns.size() + 1)]; record.blk_pos = ss.first_block; record.comp_pos = 0; record.comp_size = ss.stream_size; } } return record; }; auto scan_record = [=, &comp_res](gpu::encoder_chunk_streams const& stream, gpu::StreamIndexType type, row_group_index_info& record) { if (record.pos >= 0) { record.pos += stream.lengths[type]; while ((record.pos >= 0) && (record.blk_pos >= 0) && (static_cast<size_t>(record.pos) >= compression_blocksize) && (record.comp_pos + block_header_size + comp_res[record.blk_pos].bytes_written < static_cast<size_t>(record.comp_size))) { record.pos -= compression_blocksize; record.comp_pos += block_header_size + comp_res[record.blk_pos].bytes_written; record.blk_pos += 1; } } }; auto kind = TypeKind::STRUCT; // TBD: Not sure we need an empty index stream for column 0 if (stream_id != 0) { auto const& strm = enc_streams[column_id][0]; present = find_record(strm, gpu::CI_PRESENT); data = find_record(strm, gpu::CI_DATA); data2 = find_record(strm, gpu::CI_DATA2); // Change string dictionary to int from index point of view kind = columns[column_id].orc_kind(); if (kind == TypeKind::STRING && columns[column_id].orc_encoding() == DICTIONARY_V2) { kind = TypeKind::INT; } } ProtobufWriter pbw((compression_kind != NONE) ? 
3 : 0); // Add row index entries auto const& rowgroups_range = segmentation.stripes[stripe_id]; std::for_each(rowgroups_range.cbegin(), rowgroups_range.cend(), [&](auto rowgroup) { pbw.put_row_index_entry(present.comp_pos, present.pos, data.comp_pos, data.pos, data2.comp_pos, data2.pos, kind, (rg_stats.empty() or stream_id == 0) ? nullptr : (&rg_stats[column_id * segmentation.num_rowgroups() + rowgroup])); if (stream_id != 0) { const auto& strm = enc_streams[column_id][rowgroup]; scan_record(strm, gpu::CI_PRESENT, present); scan_record(strm, gpu::CI_DATA, data); scan_record(strm, gpu::CI_DATA2, data2); } }); (*streams)[stream_id].length = pbw.size(); if (compression_kind != NONE) { uint32_t uncomp_ix_len = (uint32_t)((*streams)[stream_id].length - 3) * 2 + 1; pbw.buffer()[0] = static_cast<uint8_t>(uncomp_ix_len >> 0); pbw.buffer()[1] = static_cast<uint8_t>(uncomp_ix_len >> 8); pbw.buffer()[2] = static_cast<uint8_t>(uncomp_ix_len >> 16); } out_sink->host_write(pbw.data(), pbw.size()); stripe->indexLength += pbw.size(); } /** * @brief Write the specified column's data streams * * @param[in] strm_desc Stream's descriptor * @param[in] enc_stream Chunk's streams * @param[in] compressed_data Compressed stream data * @param[in,out] stream_out Temporary host output buffer * @param[in,out] stripe Stream's parent stripe * @param[in,out] streams List of all streams * @param[in] compression_kind The compression kind * @param[in] out_sink Sink for writing data * @param[in] stream CUDA stream used for device memory operations and kernel launches * @return An std::future that should be synchronized to ensure the writing is complete */ std::future<void> write_data_stream(gpu::StripeStream const& strm_desc, gpu::encoder_chunk_streams const& enc_stream, uint8_t const* compressed_data, uint8_t* stream_out, StripeInformation* stripe, orc_streams* streams, CompressionKind compression_kind, std::unique_ptr<data_sink> const& out_sink, rmm::cuda_stream_view stream) { auto const length = strm_desc.stream_size; (*streams)[enc_stream.ids[strm_desc.stream_type]].length = length; if (length == 0) { return std::async(std::launch::deferred, [] {}); } auto const* stream_in = (compression_kind == NONE) ? 
enc_stream.data_ptrs[strm_desc.stream_type]
                           : (compressed_data + strm_desc.bfr_offset);
  auto write_task = [&]() {
    if (out_sink->is_device_write_preferred(length)) {
      return out_sink->device_write_async(stream_in, length, stream);
    } else {
      CUDF_CUDA_TRY(
        cudaMemcpyAsync(stream_out, stream_in, length, cudaMemcpyDefault, stream.value()));
      stream.synchronize();

      out_sink->host_write(stream_out, length);
      return std::async(std::launch::deferred, [] {});
    }
  }();
  stripe->dataLength += length;
  return write_task;
}

/**
 * @brief Insert 3-byte uncompressed block headers in a byte vector
 *
 * @param compression_kind The compression kind
 * @param compression_blocksize The block size used for compression
 * @param v The destination byte vector to write, which must include the initial 3-byte header
 */
void add_uncompressed_block_headers(CompressionKind compression_kind,
                                    size_t compression_blocksize,
                                    std::vector<uint8_t>& v)
{
  if (compression_kind != NONE) {
    size_t uncomp_len = v.size() - 3, pos = 0, block_len;
    while (uncomp_len > compression_blocksize) {
      block_len  = compression_blocksize * 2 + 1;
      v[pos + 0] = static_cast<uint8_t>(block_len >> 0);
      v[pos + 1] = static_cast<uint8_t>(block_len >> 8);
      v[pos + 2] = static_cast<uint8_t>(block_len >> 16);
      pos += 3 + compression_blocksize;
      v.insert(v.begin() + pos, 3, 0);
      uncomp_len -= compression_blocksize;
    }
    block_len  = uncomp_len * 2 + 1;
    v[pos + 0] = static_cast<uint8_t>(block_len >> 0);
    v[pos + 1] = static_cast<uint8_t>(block_len >> 8);
    v[pos + 2] = static_cast<uint8_t>(block_len >> 16);
  }
}

void pushdown_lists_null_mask(orc_column_view const& col,
                              device_span<orc_column_device_view> d_columns,
                              bitmask_type const* parent_pd_mask,
                              device_span<bitmask_type> out_mask,
                              rmm::cuda_stream_view stream)
{
  // Set all bits - correct unless there's a mismatch between offsets and null mask
  CUDF_CUDA_TRY(cudaMemsetAsync(static_cast<void*>(out_mask.data()),
                                255,
                                out_mask.size() * sizeof(bitmask_type),
                                stream.value()));

  // Reset bits where a null list element has rows in the child column
  thrust::for_each_n(
    rmm::exec_policy(stream),
    thrust::make_counting_iterator(0u),
    col.size(),
    [d_columns, col_idx = col.index(), parent_pd_mask, out_mask] __device__(auto& idx) {
      auto const d_col        = d_columns[col_idx];
      auto const is_row_valid = d_col.is_valid(idx) and bit_value_or(parent_pd_mask, idx, true);
      if (not is_row_valid) {
        auto offsets                = d_col.child(lists_column_view::offsets_column_index);
        auto const child_rows_begin = offsets.element<size_type>(idx + d_col.offset());
        auto const child_rows_end   = offsets.element<size_type>(idx + 1 + d_col.offset());
        for (auto child_row = child_rows_begin; child_row < child_rows_end; ++child_row)
          clear_bit(out_mask.data(), child_row);
      }
    });
}

/**
 * @brief All pushdown masks in a table.
 *
 * Pushdown masks are applied to child column(s). Only bits of the child column null mask that
 * correspond to set pushdown mask bits are encoded into the output file. Similarly, rows where
 * pushdown mask is 0 are treated as invalid and not included in the output.
 */
struct pushdown_null_masks {
  // Owning vector for masks in device memory
  std::vector<rmm::device_uvector<bitmask_type>> data;
  // Pointers to pushdown masks in device memory. Can be the same for multiple columns.
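  // Note: an entry may alias a column's own null mask or its parent's pushdown mask
  // (see init_pushdown_null_masks below), so `data` above owns only the masks that
  // had to be newly computed.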
std::vector<bitmask_type const*> masks; }; pushdown_null_masks init_pushdown_null_masks(orc_table_view& orc_table, rmm::cuda_stream_view stream) { std::vector<bitmask_type const*> mask_ptrs; mask_ptrs.reserve(orc_table.num_columns()); std::vector<rmm::device_uvector<bitmask_type>> pd_masks; for (auto const& col : orc_table.columns) { // Leaf columns don't need pushdown masks if (col.num_children() == 0) { mask_ptrs.emplace_back(nullptr); continue; } auto const parent_pd_mask = col.is_child() ? mask_ptrs[col.parent_index()] : nullptr; auto const null_mask = col.null_mask(); if (null_mask == nullptr and parent_pd_mask == nullptr) { mask_ptrs.emplace_back(nullptr); continue; } if (col.orc_kind() == STRUCT) { if (null_mask != nullptr and parent_pd_mask == nullptr) { // Reuse own null mask mask_ptrs.emplace_back(null_mask); } else if (null_mask == nullptr and parent_pd_mask != nullptr) { // Reuse parent's pushdown mask mask_ptrs.emplace_back(parent_pd_mask); } else { // Both are nullable, allocate new pushdown mask pd_masks.emplace_back(num_bitmask_words(col.size()), stream); mask_ptrs.emplace_back(pd_masks.back().data()); thrust::transform(rmm::exec_policy(stream), null_mask, null_mask + pd_masks.back().size(), parent_pd_mask, pd_masks.back().data(), thrust::bit_and<bitmask_type>()); } } if (col.orc_kind() == LIST or col.orc_kind() == MAP) { // Need a new pushdown mask unless both the parent and current column are not nullable auto const child_col = orc_table.column(col.child_begin()[0]); // pushdown mask applies to child column(s); use the child column size pd_masks.emplace_back(num_bitmask_words(child_col.size()), stream); mask_ptrs.emplace_back(pd_masks.back().data()); pushdown_lists_null_mask(col, orc_table.d_columns, parent_pd_mask, pd_masks.back(), stream); } } // Attach null masks to device column views (async) auto const d_mask_ptrs = cudf::detail::make_device_uvector_async( mask_ptrs, stream, rmm::mr::get_current_device_resource()); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator(0ul), orc_table.num_columns(), [cols = device_span<orc_column_device_view>{orc_table.d_columns}, ptrs = device_span<bitmask_type const* const>{d_mask_ptrs}] __device__(auto& idx) { cols[idx].pushdown_mask = ptrs[idx]; }); return {std::move(pd_masks), std::move(mask_ptrs)}; } template <typename T> struct device_stack { __device__ device_stack(T* stack_storage, int capacity) : stack(stack_storage), capacity(capacity), size(0) { } __device__ void push(T const& val) { cudf_assert(size < capacity and "Stack overflow"); stack[size++] = val; } __device__ T pop() { cudf_assert(size > 0 and "Stack underflow"); return stack[--size]; } __device__ bool empty() { return size == 0; } private: T* stack; int capacity; int size; }; orc_table_view make_orc_table_view(table_view const& table, table_device_view const& d_table, table_input_metadata const& table_meta, rmm::cuda_stream_view stream) { std::vector<orc_column_view> orc_columns; std::vector<uint32_t> str_col_indexes; std::function<void(column_view const&, orc_column_view*, column_in_metadata const&)> append_orc_column = [&](column_view const& col, orc_column_view* parent_col, column_in_metadata const& col_meta) { int const str_idx = (col.type().id() == type_id::STRING) ? 
static_cast<int>(str_col_indexes.size()) : -1; auto const new_col_idx = orc_columns.size(); orc_columns.emplace_back(new_col_idx, str_idx, parent_col, col, col_meta); if (orc_columns[new_col_idx].is_string()) { str_col_indexes.push_back(new_col_idx); } auto const kind = orc_columns[new_col_idx].orc_kind(); if (kind == TypeKind::LIST) { append_orc_column(col.child(lists_column_view::child_column_index), &orc_columns[new_col_idx], col_meta.child(lists_column_view::child_column_index)); } else if (kind == TypeKind::STRUCT) { for (auto child_idx = 0; child_idx != col.num_children(); ++child_idx) { append_orc_column( col.child(child_idx), &orc_columns[new_col_idx], col_meta.child(child_idx)); } } else if (kind == TypeKind::MAP) { // MAP: skip to the list child - include grandchildren columns instead of children auto const real_parent_col = col.child(lists_column_view::child_column_index); auto const& real_parent_meta = col_meta.child(lists_column_view::child_column_index); CUDF_EXPECTS(real_parent_meta.num_children() == 2, "Map struct column should have exactly two children"); // process MAP key append_orc_column( real_parent_col.child(0), &orc_columns[new_col_idx], real_parent_meta.child(0)); // process MAP value append_orc_column( real_parent_col.child(1), &orc_columns[new_col_idx], real_parent_meta.child(1)); } }; for (auto col_idx = 0; col_idx < table.num_columns(); ++col_idx) { append_orc_column(table.column(col_idx), nullptr, table_meta.column_metadata[col_idx]); } std::vector<TypeKind> type_kinds; type_kinds.reserve(orc_columns.size()); std::transform( orc_columns.cbegin(), orc_columns.cend(), std::back_inserter(type_kinds), [](auto& orc_column) { return orc_column.orc_kind(); }); auto const d_type_kinds = cudf::detail::make_device_uvector_async( type_kinds, stream, rmm::mr::get_current_device_resource()); rmm::device_uvector<orc_column_device_view> d_orc_columns(orc_columns.size(), stream); using stack_value_type = thrust::pair<column_device_view const*, thrust::optional<uint32_t>>; rmm::device_uvector<stack_value_type> stack_storage(orc_columns.size(), stream); // pre-order append ORC device columns cudf::detail::device_single_thread( [d_orc_cols = device_span<orc_column_device_view>{d_orc_columns}, d_type_kinds = device_span<TypeKind const>{d_type_kinds}, d_table = d_table, stack_storage = stack_storage.data(), stack_storage_size = stack_storage.size()] __device__() { device_stack stack(stack_storage, stack_storage_size); thrust::for_each(thrust::seq, thrust::make_reverse_iterator(d_table.end()), thrust::make_reverse_iterator(d_table.begin()), [&stack](column_device_view const& c) { stack.push({&c, thrust::nullopt}); }); uint32_t idx = 0; while (not stack.empty()) { auto [col, parent] = stack.pop(); d_orc_cols[idx] = orc_column_device_view{*col, parent}; if (d_type_kinds[idx] == TypeKind::MAP) { // Skip to the list child - do not include the child column, just grandchildren columns col = &col->children()[lists_column_view::child_column_index]; } if (col->type().id() == type_id::LIST) { stack.push({&col->children()[lists_column_view::child_column_index], idx}); } else if (col->type().id() == type_id::STRUCT) { thrust::for_each(thrust::seq, thrust::make_reverse_iterator(col->children().end()), thrust::make_reverse_iterator(col->children().begin()), [&stack, idx](column_device_view const& c) { stack.push({&c, idx}); }); } ++idx; } }, stream); return {std::move(orc_columns), std::move(d_orc_columns), str_col_indexes, cudf::detail::make_device_uvector_sync( str_col_indexes, stream, 
rmm::mr::get_current_device_resource())}; } hostdevice_2dvector<rowgroup_rows> calculate_rowgroup_bounds(orc_table_view const& orc_table, size_type rowgroup_size, rmm::cuda_stream_view stream) { auto const num_rowgroups = cudf::util::div_rounding_up_unsafe<size_t, size_t>(orc_table.num_rows(), rowgroup_size); hostdevice_2dvector<rowgroup_rows> rowgroup_bounds( num_rowgroups, orc_table.num_columns(), stream); thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator(0ul), num_rowgroups, [cols = device_span<orc_column_device_view const>{orc_table.d_columns}, rg_bounds = device_2dspan<rowgroup_rows>{rowgroup_bounds}, rowgroup_size] __device__(auto rg_idx) mutable { thrust::transform( thrust::seq, cols.begin(), cols.end(), rg_bounds[rg_idx].begin(), [&](auto const& col) { // Root column if (!col.parent_index.has_value()) { size_type const rows_begin = rg_idx * rowgroup_size; auto const rows_end = thrust::min<size_type>((rg_idx + 1) * rowgroup_size, col.size()); return rowgroup_rows{rows_begin, rows_end}; } else { // Child column auto const parent_index = *col.parent_index; orc_column_device_view parent_col = cols[parent_index]; auto const parent_rg = rg_bounds[rg_idx][parent_index]; if (parent_col.type().id() != type_id::LIST) { auto const offset_diff = parent_col.offset() - col.offset(); return rowgroup_rows{parent_rg.begin + offset_diff, parent_rg.end + offset_diff}; } auto offsets = parent_col.child(lists_column_view::offsets_column_index); auto const rows_begin = offsets.element<size_type>(parent_rg.begin + parent_col.offset()) - col.offset(); auto const rows_end = offsets.element<size_type>(parent_rg.end + parent_col.offset()) - col.offset(); return rowgroup_rows{rows_begin, rows_end}; } }); }); rowgroup_bounds.device_to_host_sync(stream); return rowgroup_bounds; } // returns host vector of per-rowgroup sizes encoder_decimal_info decimal_chunk_sizes(orc_table_view& orc_table, file_segmentation const& segmentation, rmm::cuda_stream_view stream) { std::map<uint32_t, rmm::device_uvector<uint32_t>> elem_sizes; // Compute per-element offsets (within each row group) on the device for (auto& orc_col : orc_table.columns) { if (orc_col.orc_kind() == DECIMAL) { auto& current_sizes = elem_sizes.insert({orc_col.index(), rmm::device_uvector<uint32_t>(orc_col.size(), stream)}) .first->second; thrust::tabulate(rmm::exec_policy_nosync(stream), current_sizes.begin(), current_sizes.end(), [d_cols = device_span<orc_column_device_view const>{orc_table.d_columns}, col_idx = orc_col.index()] __device__(auto idx) { auto const& col = d_cols[col_idx]; auto const pushdown_mask = [&]() -> cudf::bitmask_type const* { auto const parent_index = d_cols[col_idx].parent_index; if (!parent_index.has_value()) return nullptr; return d_cols[parent_index.value()].pushdown_mask; }(); if (col.is_null(idx) or not bit_value_or(pushdown_mask, idx, true)) return 0; __int128_t const element = col.type().id() == type_id::DECIMAL32 ? col.element<int32_t>(idx) : col.type().id() == type_id::DECIMAL64 ? col.element<int64_t>(idx) : col.element<__int128_t>(idx); __int128_t const sign = (element < 0) ? 
1 : 0;
                         // Zigzag encoding: interleave positive and negative values so that
                         // small magnitudes map to small unsigned values before varint encoding
                         __uint128_t zigzagged_value = ((element ^ -sign) * 2) + sign;
                         return varint_size(zigzagged_value);
                       });
      orc_col.attach_decimal_offsets(current_sizes.data());
    }
  }
  if (elem_sizes.empty()) return {};

  // Compute element offsets within each row group
  gpu::decimal_sizes_to_offsets(segmentation.rowgroups, elem_sizes, stream);

  // Gather the row group sizes and copy to host
  auto d_tmp_rowgroup_sizes = rmm::device_uvector<uint32_t>(segmentation.num_rowgroups(), stream);
  std::map<uint32_t, std::vector<uint32_t>> rg_sizes;
  for (auto const& [col_idx, esizes] : elem_sizes) {
    // Copy last elem in each row group - equal to row group size
    thrust::tabulate(rmm::exec_policy(stream),
                     d_tmp_rowgroup_sizes.begin(),
                     d_tmp_rowgroup_sizes.end(),
                     [src       = esizes.data(),
                      col_idx   = col_idx,
                      rg_bounds = device_2dspan<rowgroup_rows const>{
                        segmentation.rowgroups}] __device__(auto idx) {
                       return src[rg_bounds[idx][col_idx].end - 1];
                     });

    rg_sizes[col_idx] = cudf::detail::make_std_vector_async(d_tmp_rowgroup_sizes, stream);
  }

  return {std::move(elem_sizes), std::move(rg_sizes)};
}

std::map<uint32_t, size_t> decimal_column_sizes(
  std::map<uint32_t, std::vector<uint32_t>> const& chunk_sizes)
{
  std::map<uint32_t, size_t> column_sizes;
  std::transform(chunk_sizes.cbegin(),
                 chunk_sizes.cend(),
                 std::inserter(column_sizes, column_sizes.end()),
                 [](auto const& chunk_size) -> std::pair<uint32_t, size_t> {
                   return {
                     chunk_size.first,
                     std::accumulate(chunk_size.second.cbegin(), chunk_size.second.cend(), 0lu)};
                 });
  return column_sizes;
}

size_t max_compression_output_size(CompressionKind compression_kind, uint32_t compression_blocksize)
{
  if (compression_kind == NONE) return 0;

  return compress_max_output_chunk_size(to_nvcomp_compression_type(compression_kind),
                                        compression_blocksize);
}

std::unique_ptr<table_input_metadata> make_table_meta(table_view const& input)
{
  auto table_meta = std::make_unique<table_input_metadata>(input);

  // Fill unnamed columns' names in table_meta
  std::function<void(column_in_metadata&, std::string)> add_default_name =
    [&](column_in_metadata& col_meta, std::string default_name) {
      if (col_meta.get_name().empty()) { col_meta.set_name(default_name); }
      for (size_type i = 0; i < col_meta.num_children(); ++i) {
        add_default_name(col_meta.child(i), std::to_string(i));
      }
    };
  for (size_t i = 0; i < table_meta->column_metadata.size(); ++i) {
    add_default_name(table_meta->column_metadata[i], "_col" + std::to_string(i));
  }

  return table_meta;
}

// Computes the number of characters in each rowgroup for each string column and attaches the
// results to the corresponding orc_column_view. The owning host vector is returned.
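// The counts form a [string column][rowgroup] matrix in row-major order; for example
// (layout illustration only), with two string columns and three rowgroups the host
// vector holds {c0rg0, c0rg1, c0rg2, c1rg0, c1rg1, c1rg2}, so each column is attached
// a contiguous span of num_rowgroups entries.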
auto set_rowgroup_char_counts(orc_table_view& orc_table, device_2dspan<rowgroup_rows const> rowgroup_bounds, rmm::cuda_stream_view stream) { auto const num_rowgroups = rowgroup_bounds.size().first; auto const num_str_cols = orc_table.num_string_columns(); auto counts = rmm::device_uvector<size_type>(num_str_cols * num_rowgroups, stream); auto counts_2d_view = device_2dspan<size_type>(counts.data(), num_str_cols, num_rowgroups); gpu::rowgroup_char_counts(counts_2d_view, orc_table.d_columns, rowgroup_bounds, orc_table.d_string_column_indices, stream); auto const h_counts = cudf::detail::make_std_vector_sync(counts, stream); for (auto col_idx : orc_table.string_column_indices) { auto& str_column = orc_table.column(col_idx); str_column.attach_rowgroup_char_counts( {h_counts.data() + str_column.str_index() * num_rowgroups, num_rowgroups}); } return h_counts; } // Holds the stripe dictionary descriptors and dictionary buffers. struct stripe_dictionaries { hostdevice_2dvector<gpu::stripe_dictionary> views; // descriptors [string_column][stripe] std::vector<rmm::device_uvector<uint32_t>> data_owner; // dictionary data owner, per stripe std::vector<rmm::device_uvector<uint32_t>> index_owner; // dictionary index owner, per stripe std::vector<rmm::device_uvector<uint32_t>> order_owner; // dictionary order owner, per stripe // Should be called after encoding is complete to deallocate the dictionary buffers. void on_encode_complete(rmm::cuda_stream_view stream) { data_owner.clear(); index_owner.clear(); order_owner.clear(); for (auto& sd : views.host_view().flat_view()) { sd.data = {}; sd.index = {}; sd.data_order = {}; } views.host_to_device_async(stream); } }; /** * @brief Compares two rows in a strings column */ struct string_rows_less { device_span<orc_column_device_view> cols; uint32_t col_idx; __device__ bool operator()(size_type lhs_idx, size_type rhs_idx) const { auto const& col = cols[col_idx]; return col.element<string_view>(lhs_idx) < col.element<string_view>(rhs_idx); } }; // Build stripe dictionaries for string columns stripe_dictionaries build_dictionaries(orc_table_view& orc_table, file_segmentation const& segmentation, bool sort_dictionaries, rmm::cuda_stream_view stream) { std::vector<std::vector<rmm::device_uvector<gpu::slot_type>>> hash_maps_storage( orc_table.string_column_indices.size()); for (auto col_idx : orc_table.string_column_indices) { auto& str_column = orc_table.column(col_idx); for (auto const& stripe : segmentation.stripes) { auto const stripe_num_rows = stripe.size == 0 ? 
0 : segmentation.rowgroups[stripe.first + stripe.size - 1][col_idx].end - segmentation.rowgroups[stripe.first][col_idx].begin; hash_maps_storage[str_column.str_index()].emplace_back(stripe_num_rows * 1.43, stream); } } hostdevice_2dvector<gpu::stripe_dictionary> stripe_dicts( orc_table.num_string_columns(), segmentation.num_stripes(), stream); if (stripe_dicts.count() == 0) return {std::move(stripe_dicts), {}, {}}; // Initialize stripe dictionaries for (auto col_idx : orc_table.string_column_indices) { auto& str_column = orc_table.column(col_idx); auto const str_col_idx = str_column.str_index(); str_column.attach_stripe_dicts(stripe_dicts[str_col_idx], stripe_dicts.device_view()[str_col_idx]); for (auto const& stripe : segmentation.stripes) { auto const stripe_idx = stripe.id; auto& sd = stripe_dicts[str_col_idx][stripe_idx]; sd.map_slots = hash_maps_storage[str_col_idx][stripe_idx]; sd.column_idx = col_idx; sd.start_row = segmentation.rowgroups[stripe.first][col_idx].begin; sd.start_rowgroup = stripe.first; sd.num_rows = segmentation.rowgroups[stripe.first + stripe.size - 1][col_idx].end - sd.start_row; sd.entry_count = 0; sd.char_count = 0; } } stripe_dicts.host_to_device_async(stream); gpu::initialize_dictionary_hash_maps(stripe_dicts, stream); gpu::populate_dictionary_hash_maps(stripe_dicts, orc_table.d_columns, stream); // Copy the entry counts and char counts from the device to the host stripe_dicts.device_to_host_sync(stream); // Data owners; can be cleared after encode std::vector<rmm::device_uvector<uint32_t>> dict_data_owner; std::vector<rmm::device_uvector<uint32_t>> dict_index_owner; std::vector<rmm::device_uvector<uint32_t>> dict_order_owner; // Make decision about which stripes to encode with dictionary encoding for (auto col_idx : orc_table.string_column_indices) { auto& str_column = orc_table.column(col_idx); bool col_use_dictionary{false}; for (auto const& stripe : segmentation.stripes) { auto const stripe_idx = stripe.id; auto const str_col_idx = str_column.str_index(); auto& sd = stripe_dicts[str_col_idx][stripe_idx]; auto const direct_char_count = std::accumulate( thrust::make_counting_iterator(stripe.first), thrust::make_counting_iterator(stripe.first + stripe.size), 0, [&](auto total, auto const& rg) { return total + str_column.rowgroup_char_count(rg); }); // Enable dictionary encoding if the dictionary size is smaller than the direct encode size // The estimate excludes the LENGTH stream size, which is present in both cases sd.is_enabled = [&]() { auto const dict_index_size = varint_size(sd.entry_count); return sd.char_count + dict_index_size * sd.entry_count < direct_char_count; }(); if (sd.is_enabled) { dict_data_owner.emplace_back(sd.entry_count, stream); sd.data = dict_data_owner.back(); col_use_dictionary = true; } else { // Clear hash map storage as dictionary encoding is not used for this stripe hash_maps_storage[str_col_idx][stripe_idx] = rmm::device_uvector<gpu::slot_type>(0, stream); sd.map_slots = {}; } } // If any stripe uses dictionary encoding, allocate index storage for the whole column if (col_use_dictionary) { dict_index_owner.emplace_back(str_column.size(), stream); for (auto& sd : stripe_dicts[str_column.str_index()]) { sd.index = dict_index_owner.back(); } } } stripe_dicts.host_to_device_async(stream); gpu::collect_map_entries(stripe_dicts, stream); gpu::get_dictionary_indices(stripe_dicts, orc_table.d_columns, stream); // deallocate hash map storage, unused after this point hash_maps_storage.clear(); // Clear map slots and attach order buffers 
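  // When sorting is enabled, data_order is filled with the sorted-to-original mapping
  // by the sort below and then inverted via scatter. Example (values assumed): if
  // data_order == {2, 0, 1} after sort_by_key, the scatter rewrites it to {1, 2, 0},
  // i.e. the original-to-sorted mapping consumed later by the encoder.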
auto dictionaries_flat = stripe_dicts.host_view().flat_view(); for (auto& sd : dictionaries_flat) { if (not sd.is_enabled) { continue; } sd.map_slots = {}; if (sort_dictionaries) { dict_order_owner.emplace_back(sd.entry_count, stream); sd.data_order = dict_order_owner.back(); } else { sd.data_order = {}; } } stripe_dicts.host_to_device_async(stream); // Sort stripe dictionaries alphabetically if (sort_dictionaries) { auto streams = cudf::detail::fork_streams(stream, std::min<size_t>(dict_order_owner.size(), 8)); auto stream_idx = 0; for (auto& sd : dictionaries_flat) { if (not sd.is_enabled) { continue; } auto const& current_stream = streams[stream_idx]; // Sort the dictionary data and create a mapping from the sorted order to the original thrust::sequence( rmm::exec_policy_nosync(current_stream), sd.data_order.begin(), sd.data_order.end()); thrust::sort_by_key(rmm::exec_policy_nosync(current_stream), sd.data.begin(), sd.data.end(), sd.data_order.begin(), string_rows_less{orc_table.d_columns, sd.column_idx}); // Create the inverse permutation - i.e. the mapping from the original order to the sorted auto order_copy = cudf::detail::make_device_uvector_async<uint32_t>( sd.data_order, current_stream, rmm::mr::get_current_device_resource()); thrust::scatter(rmm::exec_policy_nosync(current_stream), thrust::counting_iterator<uint32_t>(0), thrust::counting_iterator<uint32_t>(sd.data_order.size()), order_copy.begin(), sd.data_order.begin()); stream_idx = (stream_idx + 1) % streams.size(); } cudf::detail::join_streams(streams, stream); } return {std::move(stripe_dicts), std::move(dict_data_owner), std::move(dict_index_owner), std::move(dict_order_owner)}; } /** * @brief Perform the processing steps needed to convert the input table into the output ORC data * for writing, such as compression and ORC encoding. * * @param input The input table * @param table_meta The table metadata * @param max_stripe_size Maximum size of stripes in the output file * @param row_index_stride The row index stride * @param enable_dictionary Whether dictionary is enabled * @param sort_dictionaries Whether to sort the dictionaries * @param compression_kind The compression kind * @param compression_blocksize The block size used for compression * @param stats_freq Column statistics granularity type for parquet/orc writers * @param collect_compression_stats Flag to indicate if compression statistics should be collected * @param write_mode Flag to indicate if there is only a single table write * @param out_sink Sink for writing data * @param stream CUDA stream used for device memory operations and kernel launches * @return A tuple of the intermediate results containing the processed data */ auto convert_table_to_orc_data(table_view const& input, table_input_metadata const& table_meta, stripe_size_limits max_stripe_size, size_type row_index_stride, bool enable_dictionary, bool sort_dictionaries, CompressionKind compression_kind, size_t compression_blocksize, statistics_freq stats_freq, bool collect_compression_stats, single_write_mode write_mode, data_sink const& out_sink, rmm::cuda_stream_view stream) { auto const input_tview = table_device_view::create(input, stream); auto orc_table = make_orc_table_view(input, *input_tview, table_meta, stream); // This is unused but it holds memory buffers for later access thus needs to be kept alive. 
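  // (The orc_column_device_views populated above hold raw pointers into these masks;
  // releasing pd_masks before encoding completes would leave those pointers dangling.)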
[[maybe_unused]] auto const pd_masks = init_pushdown_null_masks(orc_table, stream); auto rowgroup_bounds = calculate_rowgroup_bounds(orc_table, row_index_stride, stream); [[maybe_unused]] auto const rg_char_counts_data = set_rowgroup_char_counts(orc_table, rowgroup_bounds, stream); // Decide stripe boundaries based on rowgroups and char counts auto segmentation = calculate_segmentation(orc_table.columns, std::move(rowgroup_bounds), max_stripe_size); auto stripe_dicts = build_dictionaries(orc_table, segmentation, sort_dictionaries, stream); auto dec_chunk_sizes = decimal_chunk_sizes(orc_table, segmentation, stream); auto const uncompressed_block_align = uncomp_block_alignment(compression_kind); auto const compressed_block_align = comp_block_alignment(compression_kind); auto streams = create_streams(orc_table.columns, segmentation, decimal_column_sizes(dec_chunk_sizes.rg_sizes), enable_dictionary, compression_kind, write_mode); auto enc_data = encode_columns( orc_table, std::move(dec_chunk_sizes), segmentation, streams, uncompressed_block_align, stream); stripe_dicts.on_encode_complete(stream); auto const num_rows = input.num_rows(); // Assemble individual disparate column chunks into contiguous data streams size_type const num_index_streams = (orc_table.num_columns() + 1); auto const num_data_streams = streams.size() - num_index_streams; hostdevice_2dvector<gpu::StripeStream> strm_descs( segmentation.num_stripes(), num_data_streams, stream); auto stripes = gather_stripes(num_index_streams, segmentation, &enc_data, &strm_descs, stream); if (num_rows == 0) { return std::tuple{std::move(enc_data), std::move(segmentation), std::move(orc_table), rmm::device_uvector<uint8_t>{0, stream}, // compressed_data cudf::detail::hostdevice_vector<compression_result>{}, // comp_results std::move(strm_descs), intermediate_statistics{stream}, std::optional<writer_compression_statistics>{}, std::move(streams), std::move(stripes), std::move(stripe_dicts.views), cudf::detail::pinned_host_vector<uint8_t>()}; } // Allocate intermediate output stream buffer size_t compressed_bfr_size = 0; size_t num_compressed_blocks = 0; auto const max_compressed_block_size = max_compression_output_size(compression_kind, compression_blocksize); auto const padded_max_compressed_block_size = util::round_up_unsafe<size_t>(max_compressed_block_size, compressed_block_align); auto const padded_block_header_size = util::round_up_unsafe<size_t>(block_header_size, compressed_block_align); auto bounce_buffer = [&]() { size_t max_stream_size = 0; bool all_device_write = true; for (auto& ss : strm_descs.host_view().flat_view()) { if (!out_sink.is_device_write_preferred(ss.stream_size)) { all_device_write = false; } size_t stream_size = ss.stream_size; if (compression_kind != NONE) { ss.first_block = num_compressed_blocks; ss.bfr_offset = compressed_bfr_size; auto num_blocks = std::max<uint32_t>((stream_size + compression_blocksize - 1) / compression_blocksize, 1); stream_size += num_blocks * block_header_size; num_compressed_blocks += num_blocks; compressed_bfr_size += (padded_block_header_size + padded_max_compressed_block_size) * num_blocks; } max_stream_size = std::max(max_stream_size, stream_size); } return cudf::detail::pinned_host_vector<uint8_t>(all_device_write ? 
0 : max_stream_size); }(); // Compress the data streams rmm::device_uvector<uint8_t> compressed_data(compressed_bfr_size, stream); cudf::detail::hostdevice_vector<compression_result> comp_results(num_compressed_blocks, stream); std::optional<writer_compression_statistics> compression_stats; thrust::fill(rmm::exec_policy(stream), comp_results.d_begin(), comp_results.d_end(), compression_result{0, compression_status::FAILURE}); if (compression_kind != NONE) { strm_descs.host_to_device_async(stream); compression_stats = gpu::CompressOrcDataStreams(compressed_data, num_compressed_blocks, compression_kind, compression_blocksize, max_compressed_block_size, compressed_block_align, collect_compression_stats, strm_descs, enc_data.streams, comp_results, stream); // deallocate encoded data as it is not needed anymore enc_data.data.clear(); strm_descs.device_to_host_async(stream); comp_results.device_to_host_sync(stream); } auto intermediate_stats = gather_statistic_blobs(stats_freq, orc_table, segmentation, stream); return std::tuple{std::move(enc_data), std::move(segmentation), std::move(orc_table), std::move(compressed_data), std::move(comp_results), std::move(strm_descs), std::move(intermediate_stats), std::move(compression_stats), std::move(streams), std::move(stripes), std::move(stripe_dicts.views), std::move(bounce_buffer)}; } } // namespace writer::impl::impl(std::unique_ptr<data_sink> sink, orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream) : _stream(stream), _max_stripe_size{options.get_stripe_size_bytes(), options.get_stripe_size_rows()}, _row_index_stride{options.get_row_index_stride()}, _compression_kind(to_orc_compression(options.get_compression())), _compression_blocksize(compression_block_size(_compression_kind)), _compression_statistics(options.get_compression_statistics()), _stats_freq(options.get_statistics_freq()), _sort_dictionaries{options.get_enable_dictionary_sort()}, _single_write_mode(mode), _kv_meta(options.get_key_value_metadata()), _out_sink(std::move(sink)) { if (options.get_metadata()) { _table_meta = std::make_unique<table_input_metadata>(*options.get_metadata()); } init_state(); } writer::impl::impl(std::unique_ptr<data_sink> sink, chunked_orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream) : _stream(stream), _max_stripe_size{options.get_stripe_size_bytes(), options.get_stripe_size_rows()}, _row_index_stride{options.get_row_index_stride()}, _compression_kind(to_orc_compression(options.get_compression())), _compression_blocksize(compression_block_size(_compression_kind)), _compression_statistics(options.get_compression_statistics()), _stats_freq(options.get_statistics_freq()), _sort_dictionaries{options.get_enable_dictionary_sort()}, _single_write_mode(mode), _kv_meta(options.get_key_value_metadata()), _out_sink(std::move(sink)) { if (options.get_metadata()) { _table_meta = std::make_unique<table_input_metadata>(*options.get_metadata()); } init_state(); } writer::impl::~impl() { close(); } void writer::impl::init_state() { // Write file header _out_sink->host_write(MAGIC, std::strlen(MAGIC)); } void writer::impl::write(table_view const& input) { CUDF_EXPECTS(not _closed, "Data has already been flushed to out and closed"); if (not _table_meta) { _table_meta = make_table_meta(input); } // All kinds of memory allocation and data compressions/encoding are performed here. // If any error occurs, such as out-of-memory exception, the internal state of the current writer // is still intact. 
// Note that `out_sink_` is intentionally passed by const reference to prevent accidentally // writing anything to it. [[maybe_unused]] auto [enc_data, segmentation, orc_table, compressed_data, comp_results, strm_descs, intermediate_stats, compression_stats, streams, stripes, stripe_dicts, /* unused, but its data will be accessed via pointer later */ bounce_buffer] = [&] { try { return convert_table_to_orc_data(input, *_table_meta, _max_stripe_size, _row_index_stride, _enable_dictionary, _sort_dictionaries, _compression_kind, _compression_blocksize, _stats_freq, _compression_statistics != nullptr, _single_write_mode, *_out_sink, _stream); } catch (...) { // catch any exception type CUDF_LOG_ERROR( "ORC writer encountered exception during processing. " "No data has been written to the sink."); throw; // this throws the same exception } }(); // Compression/encoding were all successful. Now write the intermediate results. write_orc_data_to_sink(enc_data, segmentation, orc_table, compressed_data, comp_results, strm_descs, intermediate_stats.rowgroup_blobs, streams, stripes, bounce_buffer); // Update data into the footer. This needs to be called even when num_rows==0. add_table_to_footer_data(orc_table, stripes); // Update file-level and compression statistics update_statistics(orc_table.num_rows(), std::move(intermediate_stats), compression_stats); } void writer::impl::update_statistics( size_type num_rows, intermediate_statistics&& intermediate_stats, std::optional<writer_compression_statistics> const& compression_stats) { if (intermediate_stats.stripe_stat_chunks.size() > 0) { _persisted_stripe_statistics.persist( num_rows, _single_write_mode, std::move(intermediate_stats), _stream); } if (compression_stats.has_value() and _compression_statistics != nullptr) { *_compression_statistics += compression_stats.value(); } } void writer::impl::write_orc_data_to_sink(encoded_data const& enc_data, file_segmentation const& segmentation, orc_table_view const& orc_table, device_span<uint8_t const> compressed_data, host_span<compression_result const> comp_results, host_2dspan<gpu::StripeStream const> strm_descs, host_span<ColStatsBlob const> rg_stats, orc_streams& streams, host_span<StripeInformation> stripes, host_span<uint8_t> bounce_buffer) { if (orc_table.num_rows() == 0) { return; } // Write stripes std::vector<std::future<void>> write_tasks; for (size_t stripe_id = 0; stripe_id < stripes.size(); ++stripe_id) { auto& stripe = stripes[stripe_id]; stripe.offset = _out_sink->bytes_written(); // Column (skippable) index streams appear at the start of the stripe size_type const num_index_streams = (orc_table.num_columns() + 1); for (size_type stream_id = 0; stream_id < num_index_streams; ++stream_id) { write_index_stream(stripe_id, stream_id, orc_table.columns, segmentation, enc_data.streams, strm_descs, comp_results, rg_stats, &stripe, &streams, _compression_kind, _compression_blocksize, _out_sink); } // Column data consisting one or more separate streams for (auto const& strm_desc : strm_descs[stripe_id]) { write_tasks.push_back(write_data_stream( strm_desc, enc_data.streams[strm_desc.column_id][segmentation.stripes[stripe_id].first], compressed_data.data(), bounce_buffer.data(), &stripe, &streams, _compression_kind, _out_sink, _stream)); } // Write stripefooter consisting of stream information StripeFooter sf; sf.streams = streams; sf.columns.resize(orc_table.num_columns() + 1); sf.columns[0].kind = DIRECT; for (size_t i = 1; i < sf.columns.size(); ++i) { sf.columns[i].kind = orc_table.column(i - 
1).orc_encoding(); sf.columns[i].dictionarySize = (sf.columns[i].kind == DICTIONARY_V2) ? orc_table.column(i - 1).host_stripe_dict(stripe_id).entry_count : 0; if (orc_table.column(i - 1).orc_kind() == TIMESTAMP) { sf.writerTimezone = "UTC"; } } ProtobufWriter pbw((_compression_kind != NONE) ? 3 : 0); pbw.write(sf); stripe.footerLength = pbw.size(); if (_compression_kind != NONE) { uint32_t uncomp_sf_len = (stripe.footerLength - 3) * 2 + 1; pbw.buffer()[0] = static_cast<uint8_t>(uncomp_sf_len >> 0); pbw.buffer()[1] = static_cast<uint8_t>(uncomp_sf_len >> 8); pbw.buffer()[2] = static_cast<uint8_t>(uncomp_sf_len >> 16); } _out_sink->host_write(pbw.data(), pbw.size()); } for (auto const& task : write_tasks) { task.wait(); } } void writer::impl::add_table_to_footer_data(orc_table_view const& orc_table, std::vector<StripeInformation>& stripes) { if (_ffooter.headerLength == 0) { // First call _ffooter.headerLength = std::strlen(MAGIC); _ffooter.rowIndexStride = _row_index_stride; _ffooter.types.resize(1 + orc_table.num_columns()); _ffooter.types[0].kind = STRUCT; for (auto const& column : orc_table.columns) { if (!column.is_child()) { _ffooter.types[0].subtypes.emplace_back(column.id()); _ffooter.types[0].fieldNames.emplace_back(column.orc_name()); } } for (auto const& column : orc_table.columns) { auto& schema_type = _ffooter.types[column.id()]; schema_type.kind = column.orc_kind(); if (column.orc_kind() == DECIMAL) { schema_type.scale = static_cast<uint32_t>(column.scale()); schema_type.precision = column.precision(); } std::transform(column.child_begin(), column.child_end(), std::back_inserter(schema_type.subtypes), [&](auto const& child_idx) { return orc_table.column(child_idx).id(); }); if (column.orc_kind() == STRUCT) { std::transform(column.child_begin(), column.child_end(), std::back_inserter(schema_type.fieldNames), [&](auto const& child_idx) { return std::string{orc_table.column(child_idx).orc_name()}; }); } } } else { // verify the user isn't passing mismatched tables CUDF_EXPECTS(_ffooter.types.size() == 1 + orc_table.num_columns(), "Mismatch in table structure between multiple calls to write"); CUDF_EXPECTS( std::all_of(orc_table.columns.cbegin(), orc_table.columns.cend(), [&](auto const& col) { return _ffooter.types[col.id()].kind == col.orc_kind(); }), "Mismatch in column types between multiple calls to write"); } _ffooter.stripes.insert(_ffooter.stripes.end(), std::make_move_iterator(stripes.begin()), std::make_move_iterator(stripes.end())); _ffooter.numberOfRows += orc_table.num_rows(); } void writer::impl::close() { if (_closed) { return; } _closed = true; PostScript ps; auto const statistics = finish_statistic_blobs(_ffooter.stripes.size(), _persisted_stripe_statistics, _stream); // File-level statistics if (not statistics.file_level.empty()) { ProtobufWriter pbw; pbw.put_uint(encode_field_number<size_type>(1)); pbw.put_uint(_persisted_stripe_statistics.num_rows); // First entry contains total number of rows _ffooter.statistics.reserve(_ffooter.types.size()); _ffooter.statistics.emplace_back(pbw.release()); // Add file stats, stored after stripe stats in `column_stats` _ffooter.statistics.insert(_ffooter.statistics.end(), std::make_move_iterator(statistics.file_level.begin()), std::make_move_iterator(statistics.file_level.end())); } // Stripe-level statistics if (not statistics.stripe_level.empty()) { _orc_meta.stripeStats.resize(_ffooter.stripes.size()); for (size_t stripe_id = 0; stripe_id < _ffooter.stripes.size(); stripe_id++) { 
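// `statistics.stripe_level` blobs are laid out column-major: all stripes of the first column,
// then all stripes of the next, and so on. Hence the flat index computed below is
// num_stripes * col_idx + stripe_id.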
_orc_meta.stripeStats[stripe_id].colStats.resize(_ffooter.types.size()); ProtobufWriter pbw; pbw.put_uint(encode_field_number<size_type>(1)); pbw.put_uint(_ffooter.stripes[stripe_id].numberOfRows); _orc_meta.stripeStats[stripe_id].colStats[0] = pbw.release(); for (size_t col_idx = 0; col_idx < _ffooter.types.size() - 1; col_idx++) { size_t idx = _ffooter.stripes.size() * col_idx + stripe_id; _orc_meta.stripeStats[stripe_id].colStats[1 + col_idx] = std::move(statistics.stripe_level[idx]); } } } _persisted_stripe_statistics.clear(); _ffooter.contentLength = _out_sink->bytes_written(); std::transform(_kv_meta.begin(), _kv_meta.end(), std::back_inserter(_ffooter.metadata), [&](auto const& udata) { return UserMetadataItem{udata.first, udata.second}; }); // Write statistics metadata if (not _orc_meta.stripeStats.empty()) { ProtobufWriter pbw((_compression_kind != NONE) ? 3 : 0); pbw.write(_orc_meta); add_uncompressed_block_headers(_compression_kind, _compression_blocksize, pbw.buffer()); ps.metadataLength = pbw.size(); _out_sink->host_write(pbw.data(), pbw.size()); } else { ps.metadataLength = 0; } ProtobufWriter pbw((_compression_kind != NONE) ? 3 : 0); pbw.write(_ffooter); add_uncompressed_block_headers(_compression_kind, _compression_blocksize, pbw.buffer()); // Write postscript metadata ps.footerLength = pbw.size(); ps.compression = _compression_kind; ps.compressionBlockSize = _compression_blocksize; ps.version = {0, 12}; ps.magic = MAGIC; auto const ps_length = static_cast<uint8_t>(pbw.write(ps)); pbw.put_byte(ps_length); _out_sink->host_write(pbw.data(), pbw.size()); _out_sink->flush(); } // Forward to implementation writer::writer(std::unique_ptr<data_sink> sink, orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream) : _impl(std::make_unique<impl>(std::move(sink), options, mode, stream)) { } // Forward to implementation writer::writer(std::unique_ptr<data_sink> sink, chunked_orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream) : _impl(std::make_unique<impl>(std::move(sink), options, mode, stream)) { } // Destructor within this translation unit writer::~writer() = default; // Forward to implementation void writer::write(table_view const& table) { _impl->write(table); } // Forward to implementation void writer::close() { _impl->close(); } } // namespace orc } // namespace detail } // namespace io } // namespace cudf
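// [Illustrative usage sketch, not part of this file] This implementation is normally reached
// through cuDF's public ORC API; exact builder setters can vary between cuDF versions, and
// `write_table_to_orc` plus the output path are made-up for the example.

#include <cudf/io/orc.hpp>
#include <cudf/table/table_view.hpp>

void write_table_to_orc(cudf::table_view const& tbl)
{
  auto const sink = cudf::io::sink_info{"out.orc"};
  auto const opts = cudf::io::orc_writer_options::builder(sink, tbl)
                      .compression(cudf::io::compression_type::SNAPPY)
                      .build();
  cudf::io::write_orc(opts);  // single write; drives writer::write() then writer::close()
}

// Chunked variant, driving writer::write() repeatedly before close():
//   cudf::io::orc_chunked_writer writer{
//     cudf::io::chunked_orc_writer_options::builder(sink).build()};
//   writer.write(part1); writer.write(part2); writer.close();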
rapidsai_public_repos/cudf/cpp/src/io/orc/writer_impl.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "orc.hpp" #include "orc_gpu.hpp" #include <io/utilities/hostdevice_vector.hpp> #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/io/data_sink.hpp> #include <cudf/io/detail/orc.hpp> #include <cudf/io/orc.hpp> #include <cudf/table/table.hpp> #include <cudf/table/table_device_view.cuh> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_uvector.hpp> #include <thrust/host_vector.h> #include <thrust/iterator/counting_iterator.h> #include <memory> #include <string> #include <vector> namespace cudf { namespace io { namespace detail { namespace orc { // Forward internal classes class orc_column_view; using namespace cudf::io::orc; using namespace cudf::io; using cudf::detail::device_2dspan; using cudf::detail::host_2dspan; using cudf::detail::hostdevice_2dvector; /** * Non-owning view of a cuDF table that includes ORC-related information. * * Columns hierarchy is flattened and stored in pre-order. */ struct orc_table_view { std::vector<orc_column_view> columns; rmm::device_uvector<orc_column_device_view> d_columns; std::vector<uint32_t> string_column_indices; rmm::device_uvector<uint32_t> d_string_column_indices; auto num_columns() const noexcept { return columns.size(); } [[nodiscard]] size_type num_rows() const noexcept; auto num_string_columns() const noexcept { return string_column_indices.size(); } auto& column(uint32_t idx) { return columns.at(idx); } [[nodiscard]] auto const& column(uint32_t idx) const { return columns.at(idx); } auto& string_column(uint32_t idx) { return columns.at(string_column_indices.at(idx)); } [[nodiscard]] auto const& string_column(uint32_t idx) const { return columns.at(string_column_indices.at(idx)); } }; /** * @brief Indices of rowgroups contained in a stripe. * * Provides a container-like interface to iterate over rowgroup indices. */ struct stripe_rowgroups { uint32_t id; // stripe id uint32_t first; // first rowgroup in the stripe uint32_t size; // number of rowgroups in the stripe stripe_rowgroups(uint32_t id, uint32_t first, uint32_t size) : id{id}, first{first}, size{size} {} [[nodiscard]] auto cbegin() const { return thrust::make_counting_iterator(first); } [[nodiscard]] auto cend() const { return thrust::make_counting_iterator(first + size); } }; /** * @brief Holds the sizes of encoded elements of decimal columns. */ struct encoder_decimal_info { std::map<uint32_t, rmm::device_uvector<uint32_t>> elem_sizes; ///< Column index -> per-element size map std::map<uint32_t, std::vector<uint32_t>> rg_sizes; ///< Column index -> per-rowgroup size map }; /** * @brief List of per-column ORC streams. * * Provides interface to calculate their offsets. 
*/ class orc_streams { public: orc_streams(std::vector<Stream> streams, std::vector<int32_t> ids, std::vector<TypeKind> types) : streams{std::move(streams)}, ids{std::move(ids)}, types{std::move(types)} { } Stream const& operator[](int idx) const { return streams[idx]; } Stream& operator[](int idx) { return streams[idx]; } auto id(int idx) const { return ids[idx]; } auto& id(int idx) { return ids[idx]; } auto type(int idx) const { return types[idx]; } auto size() const { return streams.size(); } operator std::vector<Stream> const&() const { return streams; } private: std::vector<Stream> streams; std::vector<int32_t> ids; std::vector<TypeKind> types; }; /** * @brief Description of how the ORC file is segmented into stripes and rowgroups. */ struct file_segmentation { hostdevice_2dvector<rowgroup_rows> rowgroups; std::vector<stripe_rowgroups> stripes; auto num_rowgroups() const noexcept { return rowgroups.size().first; } auto num_stripes() const noexcept { return stripes.size(); } }; /** * @brief ORC per-chunk streams of encoded data. */ struct encoded_data { std::vector<std::vector<rmm::device_uvector<uint8_t>>> data; // Owning array of the encoded data hostdevice_2dvector<gpu::encoder_chunk_streams> streams; // streams of encoded data, per chunk }; /** * @brief Dictionary data for string columns and their device views, per column. */ struct string_dictionaries { std::vector<rmm::device_uvector<uint32_t>> data; std::vector<rmm::device_uvector<uint32_t>> index; rmm::device_uvector<device_span<uint32_t>> d_data_view; rmm::device_uvector<device_span<uint32_t>> d_index_view; // Dictionaries are currently disabled for columns with a rowgroup larger than 2^15 thrust::host_vector<bool> dictionary_enabled; }; /** * @brief Maximum size of stripes in the output file. */ struct stripe_size_limits { size_t bytes; size_type rows; }; /** * @brief Statistics data stored between calls to write for chunked writes * */ struct intermediate_statistics { explicit intermediate_statistics(rmm::cuda_stream_view stream) : stripe_stat_chunks(0, stream) {} intermediate_statistics(std::vector<ColStatsBlob> rb, rmm::device_uvector<statistics_chunk> sc, cudf::detail::hostdevice_vector<statistics_merge_group> smg, std::vector<statistics_dtype> sdt, std::vector<data_type> sct) : rowgroup_blobs(std::move(rb)), stripe_stat_chunks(std::move(sc)), stripe_stat_merge(std::move(smg)), stats_dtypes(std::move(sdt)), col_types(std::move(sct)) { } // blobs for the rowgroups. Not persisted std::vector<ColStatsBlob> rowgroup_blobs; rmm::device_uvector<statistics_chunk> stripe_stat_chunks; cudf::detail::hostdevice_vector<statistics_merge_group> stripe_stat_merge; std::vector<statistics_dtype> stats_dtypes; std::vector<data_type> col_types; }; /** * @brief used for chunked writes to persist data between calls to write. 
* */ struct persisted_statistics { void clear() { stripe_stat_chunks.clear(); stripe_stat_merge.clear(); string_pools.clear(); stats_dtypes.clear(); col_types.clear(); num_rows = 0; } void persist(int num_table_rows, single_write_mode write_mode, intermediate_statistics&& intermediate_stats, rmm::cuda_stream_view stream); std::vector<rmm::device_uvector<statistics_chunk>> stripe_stat_chunks; std::vector<cudf::detail::hostdevice_vector<statistics_merge_group>> stripe_stat_merge; std::vector<rmm::device_uvector<char>> string_pools; std::vector<statistics_dtype> stats_dtypes; std::vector<data_type> col_types; int num_rows = 0; }; /** * @brief Protobuf encoded statistics created at file close * */ struct encoded_footer_statistics { std::vector<ColStatsBlob> stripe_level; std::vector<ColStatsBlob> file_level; }; /** * @brief Implementation for ORC writer */ class writer::impl { // ORC datasets start with a 3 byte header static constexpr char const* MAGIC = "ORC"; public: /** * @brief Constructor with writer options. * * @param sink Output sink * @param options Settings for controlling behavior * @param mode Option to write at once or in chunks * @param stream CUDA stream used for device memory operations and kernel launches */ explicit impl(std::unique_ptr<data_sink> sink, orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream); /** * @brief Constructor with chunked writer options. * * @param sink Output sink * @param options Settings for controlling behavior * @param mode Option to write at once or in chunks * @param stream CUDA stream used for device memory operations and kernel launches */ explicit impl(std::unique_ptr<data_sink> sink, chunked_orc_writer_options const& options, single_write_mode mode, rmm::cuda_stream_view stream); /** * @brief Destructor to complete any incomplete write and release resources. */ ~impl(); /** * @brief Begins the chunked/streamed write process. */ void init_state(); /** * @brief Writes a single subtable as part of a larger ORC file/table write. * * @param table The table information to be written */ void write(table_view const& table); /** * @brief Finishes the chunked/streamed write process. */ void close(); private: /** * @brief Write the intermediate ORC data into the data sink. * * The intermediate data is generated from processing (compressing/encoding) an cuDF input table * by `convert_table_to_orc_data` called in the `write()` function. * * @param[in] enc_data ORC per-chunk streams of encoded data * @param[in] segmentation Description of how the ORC file is segmented into stripes and rowgroups * @param[in] orc_table Non-owning view of a cuDF table that includes ORC-related information * @param[in] compressed_data Compressed stream data * @param[in] comp_results Status of data compression * @param[in] strm_descs List of stream descriptors * @param[in] rg_stats row group level statistics * @param[in,out] streams List of stream descriptors * @param[in,out] stripes List of stripe description * @param[out] bounce_buffer Temporary host output buffer */ void write_orc_data_to_sink(encoded_data const& enc_data, file_segmentation const& segmentation, orc_table_view const& orc_table, device_span<uint8_t const> compressed_data, host_span<compression_result const> comp_results, host_2dspan<gpu::StripeStream const> strm_descs, host_span<ColStatsBlob const> rg_stats, orc_streams& streams, host_span<StripeInformation> stripes, host_span<uint8_t> bounce_buffer); /** * @brief Add the processed table data into the internal file footer. 
* * @param orc_table Non-owning view of a cuDF table that includes ORC-related information * @param stripes List of stripe description */ void add_table_to_footer_data(orc_table_view const& orc_table, std::vector<StripeInformation>& stripes); /** * @brief Update writer-level statistics with data from the current table. * * @param num_rows Number of rows in the current table * @param single_table_stats Statistics data from the current table * @param compression_stats Compression statistics from the current table */ void update_statistics(size_type num_rows, intermediate_statistics&& single_table_stats, std::optional<writer_compression_statistics> const& compression_stats); private: // CUDA stream. rmm::cuda_stream_view const _stream; // Writer options. stripe_size_limits const _max_stripe_size; size_type const _row_index_stride; CompressionKind const _compression_kind; size_t const _compression_blocksize; std::shared_ptr<writer_compression_statistics> _compression_statistics; // Optional output statistics_freq const _stats_freq; bool const _sort_dictionaries; single_write_mode const _single_write_mode; // Special parameter only used by `write()` to // indicate that we are guaranteeing a single table // write. This enables some internal optimizations. std::map<std::string, std::string> const _kv_meta; // Optional user metadata. std::unique_ptr<data_sink> const _out_sink; // Debug parameter---currently not yet supported to be user-specified. static bool constexpr _enable_dictionary = true; // Internal states, filled during `write()` and written to sink during `write` and `close()`. std::unique_ptr<table_input_metadata> _table_meta; cudf::io::orc::FileFooter _ffooter; cudf::io::orc::Metadata _orc_meta; persisted_statistics _persisted_stripe_statistics; // Statistics data saved between calls. bool _closed = false; // To track if the output has been written to sink. }; } // namespace orc } // namespace detail } // namespace io } // namespace cudf
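// [Illustrative aside, not part of this header] build_dictionaries() in writer_impl.cu enables
// dictionary encoding per stripe only when the estimated dictionary footprint (unique characters
// plus one varint index code per entry) beats direct encoding; the LENGTH stream is excluded
// since it is written in both modes. A minimal host-side model of that decision, with made-up
// helper names and a LEB128-style varint size like the writer's:

#include <cstddef>
#include <cstdint>

// Bytes needed to encode `v` as a base-128 varint: 7 payload bits per byte.
constexpr std::size_t varint_size(std::uint64_t v)
{
  std::size_t n = 1;
  while (v > 0x7f) { v >>= 7; ++n; }
  return n;
}

// Mirrors the `sd.is_enabled` condition in build_dictionaries().
constexpr bool prefer_dictionary(std::size_t dict_chars, std::size_t entries, std::size_t direct_chars)
{
  return dict_chars + varint_size(entries) * entries < direct_chars;
}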
rapidsai_public_repos/cudf/cpp/src/io/orc/stripe_data.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc_gpu.hpp" #include <cudf/io/orc_types.hpp> #include <io/utilities/block_utils.cuh> #include <cub/cub.cuh> #include <rmm/cuda_stream_view.hpp> namespace cudf { namespace io { namespace orc { namespace gpu { using cudf::io::detail::string_index_pair; // Must be able to handle 512x 8-byte values. These values are base 128 encoded // so 8 byte value is expanded to 10 bytes. constexpr int bytestream_buffer_size = 512 * 8 * 2; constexpr int bytestream_buffer_mask = (bytestream_buffer_size - 1) >> 2; // TODO: Should be more efficient with 512 threads per block and circular queue for values constexpr int num_warps = 32; constexpr int block_size = 32 * num_warps; // Add some margin to look ahead to future rows in case there are many zeroes constexpr int row_decoder_buffer_size = block_size + 128; inline __device__ uint8_t is_rlev1(uint8_t encoding_mode) { return encoding_mode < DIRECT_V2; } inline __device__ uint8_t is_dictionary(uint8_t encoding_mode) { return encoding_mode & 1; } struct orc_bytestream_s { uint8_t const* base; uint32_t pos; uint32_t len; uint32_t fill_pos; uint32_t fill_count; union { uint8_t u8[bytestream_buffer_size]; uint32_t u32[bytestream_buffer_size >> 2]; uint2 u64[bytestream_buffer_size >> 3]; } buf; }; struct orc_rlev1_state_s { uint32_t num_runs; uint32_t num_vals; int32_t run_data[num_warps * 12]; // (delta << 24) | (count << 16) | (first_val) }; struct orc_rlev2_state_s { uint32_t num_runs; uint32_t num_vals; union { uint32_t u32[num_warps]; uint64_t u64[num_warps]; } baseval; uint16_t m2_pw_byte3[num_warps]; int64_t delta[num_warps]; uint16_t runs_loc[block_size]; }; struct orc_byterle_state_s { uint32_t num_runs; uint32_t num_vals; uint32_t runs_loc[num_warps]; uint32_t runs_pos[num_warps]; }; struct orc_rowdec_state_s { uint32_t nz_count; uint32_t row[row_decoder_buffer_size]; // 0=skip, >0: row position relative to cur_row }; struct orc_strdict_state_s { DictionaryEntry* local_dict; uint32_t dict_pos; uint32_t dict_len; }; struct orc_datadec_state_s { uint32_t cur_row; // starting row of current batch uint32_t end_row; // ending row of this chunk (start_row + num_rows) uint32_t max_vals; // max # of non-zero values to decode in this batch uint32_t nrows; // # of rows in current batch (up to block_size) uint32_t buffered_count; // number of buffered values in the secondary data stream duration_s tz_epoch; // orc_ut_epoch - ut_offset RowGroup index; }; struct orcdec_state_s { ColumnDesc chunk; orc_bytestream_s bs; orc_bytestream_s bs2; int is_string; uint64_t num_child_rows; union { orc_strdict_state_s dict; uint32_t nulls_desc_row; // number of rows processed for nulls. 
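// The `top` union holds phase-specific state: `dict` while building the string dictionary
// index, `nulls_desc_row` while decoding the PRESENT (null mask) stream, and `data` during
// value decoding; each kernel phase uses exactly one member.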
orc_datadec_state_s data; } top; union { orc_rlev1_state_s rlev1; orc_rlev2_state_s rlev2; orc_byterle_state_s rle8; orc_rowdec_state_s rowdec; } u; union values { uint8_t u8[block_size * 16]; uint32_t u32[block_size * 4]; int32_t i32[block_size * 4]; uint64_t u64[block_size * 2]; int64_t i64[block_size * 2]; double f64[block_size * 2]; __int128_t i128[block_size]; __uint128_t u128[block_size]; } vals; }; /** * @brief Initializes byte stream, modifying length and start position to keep the read pointer * 8-byte aligned. * * Assumes that the address range [start_address & ~7, (start_address + len - 1) | 7] * is valid. * * @param[in,out] bs Byte stream input * @param[in] base Pointer to raw byte stream data * @param[in] len Stream length in bytes */ static __device__ void bytestream_init(orc_bytestream_s* bs, uint8_t const* base, uint32_t len) { uint32_t pos = (len > 0) ? static_cast<uint32_t>(7 & reinterpret_cast<size_t>(base)) : 0; bs->base = base - pos; bs->pos = pos; bs->len = (len + pos + 7) & ~7; bs->fill_pos = 0; bs->fill_count = min(bs->len, bytestream_buffer_size) >> 3; } /** * @brief Increment the read position, returns number of 64-bit slots to fill * * @param[in] bs Byte stream input * @param[in] bytes_consumed Number of bytes that were consumed */ static __device__ void bytestream_flush_bytes(orc_bytestream_s* bs, uint32_t bytes_consumed) { uint32_t pos = bs->pos; uint32_t len = bs->len; uint32_t pos_new = min(pos + bytes_consumed, len); bs->pos = pos_new; pos = min(pos + bytestream_buffer_size, len); pos_new = min(pos_new + bytestream_buffer_size, len); bs->fill_pos = pos; bs->fill_count = (pos_new >> 3) - (pos >> 3); } /** * @brief Refill the byte stream buffer * * @param[in] bs Byte stream input * @param[in] t thread id */ static __device__ void bytestream_fill(orc_bytestream_s* bs, int t) { auto const count = bs->fill_count; if (t < count) { auto const pos8 = (bs->fill_pos >> 3) + t; memcpy(&bs->buf.u64[pos8 & ((bytestream_buffer_size >> 3) - 1)], &bs->base[pos8 * sizeof(uint2)], sizeof(uint2)); } } /** * @brief Read a byte from the byte stream (byte aligned) * * @param[in] bs Byte stream input * @param[in] pos Position in byte stream * @return byte */ inline __device__ uint8_t bytestream_readbyte(orc_bytestream_s* bs, int pos) { return bs->buf.u8[pos & (bytestream_buffer_size - 1)]; } /** * @brief Read 32 bits from a byte stream (little endian, byte aligned) * * @param[in] bs Byte stream input * @param[in] pos Position in byte stream * @result bits */ inline __device__ uint32_t bytestream_readu32(orc_bytestream_s* bs, int pos) { uint32_t a = bs->buf.u32[(pos & (bytestream_buffer_size - 1)) >> 2]; uint32_t b = bs->buf.u32[((pos + 4) & (bytestream_buffer_size - 1)) >> 2]; return __funnelshift_r(a, b, (pos & 3) * 8); } /** * @brief Read 64 bits from a byte stream (little endian, byte aligned) * * @param[in] bs Byte stream input * @param[in] pos Position in byte stream * @param[in] numbits number of bits * @return bits */ inline __device__ uint64_t bytestream_readu64(orc_bytestream_s* bs, int pos) { uint32_t a = bs->buf.u32[(pos & (bytestream_buffer_size - 1)) >> 2]; uint32_t b = bs->buf.u32[((pos + 4) & (bytestream_buffer_size - 1)) >> 2]; uint32_t c = bs->buf.u32[((pos + 8) & (bytestream_buffer_size - 1)) >> 2]; uint32_t lo32 = __funnelshift_r(a, b, (pos & 3) * 8); uint32_t hi32 = __funnelshift_r(b, c, (pos & 3) * 8); uint64_t v = hi32; v <<= 32; v |= lo32; return v; } /** * @brief Read up to 32-bits from a byte stream (big endian) * * @param[in] bs Byte stream input * 
@param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @return decoded value */ inline __device__ uint32_t bytestream_readbits(orc_bytestream_s* bs, int bitpos, uint32_t numbits) { int idx = bitpos >> 5; uint32_t a = __byte_perm(bs->buf.u32[(idx + 0) & bytestream_buffer_mask], 0, 0x0123); uint32_t b = __byte_perm(bs->buf.u32[(idx + 1) & bytestream_buffer_mask], 0, 0x0123); return __funnelshift_l(b, a, bitpos & 0x1f) >> (32 - numbits); } /** * @brief Read up to 64-bits from a byte stream (big endian) * * @param[in] bs Byte stream input * @param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @return decoded value */ inline __device__ uint64_t bytestream_readbits64(orc_bytestream_s* bs, int bitpos, uint32_t numbits) { int idx = bitpos >> 5; uint32_t a = __byte_perm(bs->buf.u32[(idx + 0) & bytestream_buffer_mask], 0, 0x0123); uint32_t b = __byte_perm(bs->buf.u32[(idx + 1) & bytestream_buffer_mask], 0, 0x0123); uint32_t c = __byte_perm(bs->buf.u32[(idx + 2) & bytestream_buffer_mask], 0, 0x0123); uint32_t hi32 = __funnelshift_l(b, a, bitpos & 0x1f); uint32_t lo32 = __funnelshift_l(c, b, bitpos & 0x1f); uint64_t v = hi32; v <<= 32; v |= lo32; v >>= (64 - numbits); return v; } /** * @brief Decode a big-endian unsigned 32-bit value * * @param[in] bs Byte stream input * @param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @param[out] result decoded value */ inline __device__ void bytestream_readbe(orc_bytestream_s* bs, int bitpos, uint32_t numbits, uint32_t& result) { result = bytestream_readbits(bs, bitpos, numbits); } /** * @brief Decode a big-endian signed 32-bit value * * @param[in] bs Byte stream input * @param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @param[out] result decoded value */ inline __device__ void bytestream_readbe(orc_bytestream_s* bs, int bitpos, uint32_t numbits, int32_t& result) { uint32_t u = bytestream_readbits(bs, bitpos, numbits); result = (int32_t)((u >> 1u) ^ -(int32_t)(u & 1)); } /** * @brief Decode a big-endian unsigned 64-bit value * * @param[in] bs Byte stream input * @param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @param[out] result decoded value */ inline __device__ void bytestream_readbe(orc_bytestream_s* bs, int bitpos, uint32_t numbits, uint64_t& result) { result = bytestream_readbits64(bs, bitpos, numbits); } /** * @brief Decode a big-endian signed 64-bit value * * @param[in] bs Byte stream input * @param[in] bitpos Position in byte stream * @param[in] numbits number of bits * @param[out] result decoded value */ inline __device__ void bytestream_readbe(orc_bytestream_s* bs, int bitpos, uint32_t numbits, int64_t& result) { uint64_t u = bytestream_readbits64(bs, bitpos, numbits); result = (int64_t)((u >> 1u) ^ -(int64_t)(u & 1)); } /** * @brief Return the length of a base-128 varint * * @param[in] bs Byte stream input * @param[in] pos Position in circular byte stream buffer * @return length of varint in bytes */ template <class T> inline __device__ uint32_t varint_length(orc_bytestream_s* bs, int pos) { if (bytestream_readbyte(bs, pos) > 0x7f) { uint32_t next32 = bytestream_readu32(bs, pos + 1); uint32_t zbit = __ffs((~next32) & 0x8080'8080); if (sizeof(T) <= 4 || zbit) { return 1 + (zbit >> 3); // up to 5x7 bits } else { next32 = bytestream_readu32(bs, pos + 5); zbit = __ffs((~next32) & 0x8080'8080); if (zbit) { return 5 + (zbit >> 3); // up to 9x7 bits } else if ((sizeof(T) <= 8) || (bytestream_readbyte(bs, pos + 9) <= 
0x7f)) { return 10; // up to 70 bits } else { uint64_t next64 = bytestream_readu64(bs, pos + 10); zbit = __ffsll((~next64) & 0x8080'8080'8080'8080ull); if (zbit) { return 10 + (zbit >> 3); // Up to 18x7 bits (126) } else { return 19; // Up to 19x7 bits (133) } } } } else { return 1; } } /** * @brief Decodes a base-128 varint * * @param[in] bs Byte stream input * @param[in] pos Position in circular byte stream buffer * @param[in] result Unpacked value * @return new position in byte stream buffer */ template <class T> inline __device__ int decode_base128_varint(orc_bytestream_s* bs, int pos, T& result) { uint32_t v = bytestream_readbyte(bs, pos++); if (v > 0x7f) { uint32_t b = bytestream_readbyte(bs, pos++); v = (v & 0x7f) | (b << 7); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0x3fff) | (b << 14); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0x1f'ffff) | (b << 21); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0x0fff'ffff) | (b << 28); if constexpr (sizeof(T) > 4) { uint32_t lo = v; uint64_t hi; v = b >> 4; if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 7) | (b << 3); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0x3ff) | (b << 10); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0x1'ffff) | (b << 17); if (b > 0x7f) { b = bytestream_readbyte(bs, pos++); v = (v & 0xff'ffff) | (b << 24); if (b > 0x7f) { pos++; // last bit is redundant (extra byte implies bit63 is 1) } } } } } hi = v; hi <<= 32; result = hi | lo; return pos; } } } } } result = v; return pos; } /** * @brief Decodes a signed int128 encoded as base-128 varint (used for decimals) */ inline __device__ __int128_t decode_varint128(orc_bytestream_s* bs, int pos) { auto byte = bytestream_readbyte(bs, pos++); __int128_t const sign_mask = -(int32_t)(byte & 1); __int128_t value = (byte >> 1) & 0x3f; uint32_t bitpos = 6; while (byte & 0x80 && bitpos < 128) { byte = bytestream_readbyte(bs, pos++); value |= ((__uint128_t)(byte & 0x7f)) << bitpos; bitpos += 7; } return value ^ sign_mask; } /** * @brief Decodes an unsigned 32-bit varint */ inline __device__ int decode_varint(orc_bytestream_s* bs, int pos, uint32_t& result) { uint32_t u; pos = decode_base128_varint<uint32_t>(bs, pos, u); result = u; return pos; } /** * @brief Decodes an unsigned 64-bit varint */ inline __device__ int decode_varint(orc_bytestream_s* bs, int pos, uint64_t& result) { uint64_t u; pos = decode_base128_varint<uint64_t>(bs, pos, u); result = u; return pos; } /** * @brief Signed version of 32-bit decode_varint */ inline __device__ int decode_varint(orc_bytestream_s* bs, int pos, int32_t& result) { uint32_t u; pos = decode_base128_varint<uint32_t>(bs, pos, u); result = (int32_t)((u >> 1u) ^ -(int32_t)(u & 1)); return pos; } /** * @brief Signed version of 64-bit decode_varint */ inline __device__ int decode_varint(orc_bytestream_s* bs, int pos, int64_t& result) { uint64_t u; pos = decode_base128_varint<uint64_t>(bs, pos, u); result = (int64_t)((u >> 1u) ^ -(int64_t)(u & 1)); return pos; } /** * @brief In-place conversion from lengths to positions * * @param[in] vals input values * @param[in] numvals number of values * @param[in] t thread id * * @return number of values decoded */ template <class T> inline __device__ void lengths_to_positions(T* vals, uint32_t numvals, unsigned int t) { for (uint32_t n = 1; n < numvals; n <<= 1) { __syncthreads(); if ((t & n) && (t < numvals)) vals[t] += vals[(t & ~n) | (n - 1)]; } } /** * @brief ORC Integer RLEv1 decoding * * @param[in] bs input 
byte stream * @param[in] rle RLE state * @param[in] vals buffer for output values (uint32_t, int32_t, uint64_t or int64_t) * @param[in] maxvals maximum number of values to decode * @param[in] t thread id * * @return number of values decoded */ template <class T> static __device__ uint32_t Integer_RLEv1(orc_bytestream_s* bs, orc_rlev1_state_s* rle, T* vals, uint32_t maxvals, int t) { uint32_t numvals, numruns; if (t == 0) { uint32_t maxpos = min(bs->len, bs->pos + (bytestream_buffer_size - 8u)); uint32_t lastpos = bs->pos; numvals = numruns = 0; // Find the length and start location of each run while (numvals < maxvals && numruns < num_warps * 12) { uint32_t pos = lastpos; uint32_t n = bytestream_readbyte(bs, pos++); if (n <= 0x7f) { // Run int32_t delta; n = n + 3; if (numvals + n > maxvals) break; delta = bytestream_readbyte(bs, pos++); vals[numvals] = pos & 0xffff; pos += varint_length<T>(bs, pos); if (pos > maxpos) break; rle->run_data[numruns++] = (delta << 24) | (n << 16) | numvals; numvals += n; } else { // Literals uint32_t i; n = 0x100 - n; if (numvals + n > maxvals) break; i = 0; do { vals[numvals + i] = pos & 0xffff; pos += varint_length<T>(bs, pos); } while (++i < n); if (pos > maxpos) break; numvals += n; } lastpos = pos; } rle->num_runs = numruns; rle->num_vals = numvals; bytestream_flush_bytes(bs, lastpos - bs->pos); } __syncthreads(); // Expand the runs numruns = rle->num_runs; if (numruns > 0) { int r = t >> 5; int tr = t & 0x1f; for (uint32_t run = r; run < numruns; run += num_warps) { int32_t run_data = rle->run_data[run]; int n = (run_data >> 16) & 0xff; int delta = run_data >> 24; uint32_t base = run_data & 0x3ff; uint32_t pos = vals[base] & 0xffff; for (int i = 1 + tr; i < n; i += 32) { vals[base + i] = ((delta * i) << 16) | pos; } } __syncthreads(); } numvals = rle->num_vals; // Decode individual 32-bit varints if (t < numvals) { int32_t pos = vals[t]; int32_t delta = pos >> 16; T v; decode_varint(bs, pos, v); vals[t] = v + delta; } __syncthreads(); return numvals; } /** * @brief Maps the RLEv2 5-bit length code to 6-bit length */ static const __device__ __constant__ uint8_t kRLEv2_W[32] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 28, 30, 32, 40, 48, 56, 64}; /** * @brief Maps the RLEv2 patch size (pw + pgw) to number of bits * * Patch size (in bits) is only allowed to be from the below set. If `pw + pgw == 34` then the size * of the patch in the file is the smallest size in the set that can fit 34 bits i.e. 
* `ClosestFixedBitsMap[34] == 40` * * @see https://github.com/apache/orc/commit/9faf7f5147a7bc69 */ static const __device__ __constant__ uint8_t ClosestFixedBitsMap[65] = { 1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 26, 28, 28, 30, 30, 32, 32, 40, 40, 40, 40, 40, 40, 40, 40, 48, 48, 48, 48, 48, 48, 48, 48, 56, 56, 56, 56, 56, 56, 56, 56, 64, 64, 64, 64, 64, 64, 64, 64}; /** * @brief ORC Integer RLEv2 decoding * * @param[in] bs input byte stream * @param[in] rle RLE state * @param[in] vals buffer for output values (uint32_t, int32_t, uint64_t or int64_t) * @param[in] maxvals maximum number of values to decode * @param[in] t thread id * @param[in] has_buffered_values If true, means there are already buffered values * * @return number of values decoded */ template <class T> static __device__ uint32_t Integer_RLEv2(orc_bytestream_s* bs, orc_rlev2_state_s* rle, T* vals, uint32_t maxvals, int t, bool has_buffered_values = false) { if (t == 0) { uint32_t maxpos = min(bs->len, bs->pos + (bytestream_buffer_size - 8u)); uint32_t lastpos = bs->pos; auto numvals = 0; auto numruns = 0; // Find the length and start location of each run while (numvals < maxvals) { uint32_t pos = lastpos; uint32_t byte0 = bytestream_readbyte(bs, pos++); uint32_t n, l; int mode = byte0 >> 6; rle->runs_loc[numruns] = numvals; vals[numvals] = lastpos; if (mode == 0) { // 00lllnnn: short repeat encoding l = 1 + ((byte0 >> 3) & 7); // 1 to 8 bytes n = 3 + (byte0 & 7); // 3 to 10 values } else { l = kRLEv2_W[(byte0 >> 1) & 0x1f]; n = 1 + ((byte0 & 1) << 8) + bytestream_readbyte(bs, pos++); if (mode == 1) { // 01wwwwwn.nnnnnnnn: direct encoding l = (l * n + 7) >> 3; } else if (mode == 2) { // 10wwwwwn.nnnnnnnn.xxxxxxxx.yyyyyyyy: patched base encoding uint32_t byte2 = bytestream_readbyte(bs, pos++); uint32_t byte3 = bytestream_readbyte(bs, pos++); uint32_t bw = 1 + (byte2 >> 5); // base value width, 1 to 8 bytes uint32_t pw = kRLEv2_W[byte2 & 0x1f]; // patch width, 1 to 64 bits uint32_t pgw = 1 + (byte3 >> 5); // patch gap width, 1 to 8 bits uint32_t pgw_pw_len = ClosestFixedBitsMap[min(pw + pgw, 64u)]; // ceiled patch width uint32_t pll = byte3 & 0x1f; // patch list length l = (l * n + 7) >> 3; l += bw; l += (pll * (pgw_pw_len) + 7) >> 3; } else { // 11wwwwwn.nnnnnnnn.<base>.<delta>: delta encoding uint32_t deltapos = varint_length<T>(bs, pos); deltapos += varint_length<T>(bs, pos + deltapos); l = (l > 1 && n > 2) ? (l * (n - 2) + 7) >> 3 : 0; l += deltapos; } } if ((numvals != 0) and (numvals + n > maxvals)) break; // case where there are buffered values and can't consume a whole chunk // from decoded values, so skip adding any more to buffer, work on buffered values and then // start fresh in next iteration with empty buffer. if ((numvals == 0) and (n > maxvals) and (has_buffered_values)) break; pos += l; if (pos > maxpos) break; ((numvals == 0) and (n > maxvals)) ? 
numvals = maxvals : numvals += n; lastpos = pos; numruns++; } rle->num_vals = numvals; rle->num_runs = numruns; bytestream_flush_bytes(bs, lastpos - bs->pos); } __syncthreads(); // Process the runs, 1 warp per run auto const numruns = rle->num_runs; auto const r = t >> 5; auto const tr = t & 0x1f; for (uint32_t run = r; run < numruns; run += num_warps) { uint32_t base, pos, w, n; int mode; if (tr == 0) { uint32_t byte0; base = rle->runs_loc[run]; pos = vals[base]; byte0 = bytestream_readbyte(bs, pos++); mode = byte0 >> 6; if (mode == 0) { T baseval; // 00lllnnn: short repeat encoding w = 8 + (byte0 & 0x38); // 8 to 64 bits n = 3 + (byte0 & 7); // 3 to 10 values bytestream_readbe(bs, pos * 8, w, baseval); if constexpr (sizeof(T) <= 4) { rle->baseval.u32[r] = baseval; } else { rle->baseval.u64[r] = baseval; } } else { w = kRLEv2_W[(byte0 >> 1) & 0x1f]; n = 1 + ((byte0 & 1) << 8) + bytestream_readbyte(bs, pos++); if (mode > 1) { if (mode == 2) { // Patched base uint32_t byte2 = bytestream_readbyte(bs, pos++); uint32_t byte3 = bytestream_readbyte(bs, pos++); uint32_t bw = 1 + (byte2 >> 5); // base value width, 1 to 8 bytes uint32_t pw = kRLEv2_W[byte2 & 0x1f]; // patch width, 1 to 64 bits if constexpr (sizeof(T) <= 4) { uint32_t baseval; bytestream_readbe(bs, pos * 8, bw * 8, baseval); uint32_t const mask = (1u << (bw * 8 - 1)) - 1; // Negative values are represented with the highest bit set to 1 rle->baseval.u32[r] = (std::is_signed_v<T> and baseval > mask) ? -static_cast<int32_t>(baseval & mask) : baseval; } else { uint64_t baseval; bytestream_readbe(bs, pos * 8, bw * 8, baseval); uint64_t const mask = (1ul << (bw * 8 - 1)) - 1; // Negative values are represented with the highest bit set to 1 rle->baseval.u64[r] = (std::is_signed_v<T> and baseval > mask) ? -static_cast<int64_t>(baseval & mask) : baseval; } rle->m2_pw_byte3[r] = (pw << 8) | byte3; pos += bw; } else { T baseval; int64_t delta; // Delta pos = decode_varint(bs, pos, baseval); if constexpr (sizeof(T) <= 4) { rle->baseval.u32[r] = baseval; } else { rle->baseval.u64[r] = baseval; } pos = decode_varint(bs, pos, delta); rle->delta[r] = delta; } } } } base = shuffle(base); mode = shuffle(mode); pos = shuffle(pos); n = shuffle(n); w = shuffle(w); __syncwarp(); // Not required, included to fix the racecheck warning for (uint32_t i = tr; i < n; i += 32) { if constexpr (sizeof(T) <= 4) { if (mode == 0) { vals[base + i] = rle->baseval.u32[r]; } else if (mode == 1) { T v; bytestream_readbe(bs, pos * 8 + i * w, w, v); vals[base + i] = v; } else if (mode == 2) { uint32_t ofs = bytestream_readbits(bs, pos * 8 + i * w, w); vals[base + i] = rle->baseval.u32[r] + ofs; } else { int64_t delta = rle->delta[r]; if (w > 1 && i > 1) { int32_t delta_s = (delta < 0) ? -1 : 0; vals[base + i] = (bytestream_readbits(bs, pos * 8 + (i - 2) * w, w) ^ delta_s) - delta_s; } else { vals[base + i] = (i == 0) ? 0 : static_cast<uint32_t>(delta); } } } else { if (mode == 0) { vals[base + i] = rle->baseval.u64[r]; } else if (mode == 1) { T v; bytestream_readbe(bs, pos * 8 + i * w, w, v); vals[base + i] = v; } else if (mode == 2) { uint64_t ofs = bytestream_readbits64(bs, pos * 8 + i * w, w); vals[base + i] = rle->baseval.u64[r] + ofs; } else { int64_t delta = rle->delta[r], ofs; if (w > 1 && i > 1) { int64_t delta_s = (delta < 0) ? -1 : 0; ofs = (bytestream_readbits64(bs, pos * 8 + (i - 2) * w, w) ^ delta_s) - delta_s; } else { ofs = (i == 0) ? 
0 : delta; } vals[base + i] = ofs; } } } __syncwarp(); // Patch values if (mode == 2) { uint32_t pw_byte3 = rle->m2_pw_byte3[r]; uint32_t pw = pw_byte3 >> 8; uint32_t pgw = 1 + ((pw_byte3 >> 5) & 7); // patch gap width, 1 to 8 bits uint32_t pll = pw_byte3 & 0x1f; // patch list length if (pll != 0) { uint32_t pgw_pw_len = ClosestFixedBitsMap[min(pw + pgw, 64u)]; uint64_t patch_pos64 = (tr < pll) ? bytestream_readbits64( bs, pos * 8 + ((n * w + 7) & ~7) + tr * (pgw_pw_len), pgw_pw_len) : 0; uint32_t patch_pos; T patch = 1; patch <<= pw; patch = (patch - 1) & (T)patch_pos64; patch <<= w; patch_pos = (uint32_t)(patch_pos64 >> pw); for (uint32_t k = 1; k < pll; k <<= 1) { uint32_t tmp = shuffle(patch_pos, (tr & ~k) | (k - 1)); patch_pos += (tr & k) ? tmp : 0; } if (tr < pll && patch_pos < n) { vals[base + patch_pos] += patch; } } } __syncwarp(); if (mode == 3) { T baseval; for (uint32_t i = 1; i < n; i <<= 1) { __syncwarp(); for (uint32_t j = tr; j < n; j += 32) { if (j & i) vals[base + j] += vals[base + ((j & ~i) | (i - 1))]; } } if constexpr (sizeof(T) <= 4) baseval = rle->baseval.u32[r]; else baseval = rle->baseval.u64[r]; for (uint32_t j = tr; j < n; j += 32) { vals[base + j] += baseval; } } __syncwarp(); } __syncthreads(); return rle->num_vals; } /** * @brief Reads 32 booleans as a packed 32-bit value * * @param[in] vals 32-bit array of values (little-endian) * @param[in] bitpos bit position * * @return 32-bit value */ inline __device__ uint32_t rle8_read_bool32(uint32_t* vals, uint32_t bitpos) { uint32_t a = vals[(bitpos >> 5) + 0]; uint32_t b = vals[(bitpos >> 5) + 1]; a = __byte_perm(a, 0, 0x0123); b = __byte_perm(b, 0, 0x0123); return __brev(__funnelshift_l(b, a, bitpos)); } /** * @brief ORC Byte RLE decoding * * @param[in] bs Input byte stream * @param[in] rle RLE state * @param[in] vals output buffer for decoded 8-bit values * @param[in] maxvals Maximum number of values to decode * @param[in] t thread id * * @return number of values decoded */ static __device__ uint32_t Byte_RLE(orc_bytestream_s* bs, orc_byterle_state_s* rle, uint8_t* vals, uint32_t maxvals, int t) { uint32_t numvals, numruns; int r, tr; if (t == 0) { uint32_t maxpos = min(bs->len, bs->pos + (bytestream_buffer_size - 8u)); uint32_t lastpos = bs->pos; numvals = numruns = 0; // Find the length and start location of each run while (numvals < maxvals && numruns < num_warps) { uint32_t pos = lastpos, n; rle->runs_pos[numruns] = pos; rle->runs_loc[numruns] = numvals; n = bytestream_readbyte(bs, pos++); if (n <= 0x7f) { // Run n = n + 3; pos++; } else { // Literals n = 0x100 - n; pos += n; } if ((numvals != 0) and (numvals + n > maxvals)) break; if (pos > maxpos) break; numruns++; ((numvals == 0) and (n > maxvals)) ? 
numvals = maxvals : numvals += n; lastpos = pos; } rle->num_runs = numruns; rle->num_vals = numvals; bytestream_flush_bytes(bs, lastpos - bs->pos); } __syncthreads(); numruns = rle->num_runs; r = t >> 5; tr = t & 0x1f; for (int run = r; run < numruns; run += num_warps) { uint32_t pos = rle->runs_pos[run]; uint32_t loc = rle->runs_loc[run]; uint32_t n = bytestream_readbyte(bs, pos++); uint32_t literal_mask; if (n <= 0x7f) { literal_mask = 0; n += 3; } else { literal_mask = ~0; n = 0x100 - n; } for (uint32_t i = tr; i < n; i += 32) { vals[loc + i] = bytestream_readbyte(bs, pos + (i & literal_mask)); } } __syncthreads(); return rle->num_vals; } static const __device__ __constant__ int64_t kPow5i[28] = {1, 5, 25, 125, 625, 3125, 15625, 78125, 390625, 1953125, 9765625, 48828125, 244140625, 1220703125, 6103515625ll, 30517578125ll, 152587890625ll, 762939453125ll, 3814697265625ll, 19073486328125ll, 95367431640625ll, 476837158203125ll, 2384185791015625ll, 11920928955078125ll, 59604644775390625ll, 298023223876953125ll, 1490116119384765625ll, 7450580596923828125ll}; /** * @brief ORC Decimal decoding (unbounded base-128 varints) * * @param[in] bs Input byte stream * @param[in,out] vals on input: scale from secondary stream, on output: value * @param[in] val_scale Scale of each value * @param[in] col_scale Scale from schema to which value will be adjusted * @param[in] numvals Number of values to decode * @param[in] t thread id * * @return number of values decoded */ static __device__ int Decode_Decimals(orc_bytestream_s* bs, orc_byterle_state_s* scratch, orcdec_state_s::values& vals, int val_scale, int numvals, type_id dtype_id, int col_scale, int t) { uint32_t num_vals_read = 0; // Iterates till `numvals` are read or there is nothing to read once the // stream has reached its end, and can't read anything more. while (num_vals_read != numvals) { if (t == 0) { uint32_t maxpos = min(bs->len, bs->pos + (bytestream_buffer_size - 8u)); uint32_t lastpos = bs->pos; uint32_t n; for (n = num_vals_read; n < numvals; n++) { uint32_t pos = lastpos; pos += varint_length<uint4>(bs, pos); if (pos > maxpos) break; vals.i64[2 * n] = lastpos; lastpos = pos; } scratch->num_vals = n; bytestream_flush_bytes(bs, lastpos - bs->pos); } __syncthreads(); uint32_t num_vals_to_read = scratch->num_vals; if (t >= num_vals_read and t < num_vals_to_read) { auto const pos = static_cast<int>(vals.i64[2 * t]); __int128_t v = decode_varint128(bs, pos); auto const scaled_value = [&]() { // Since cuDF column stores just one scale, value needs to be adjusted to col_scale from // val_scale. So the difference of them will be used to add 0s or remove digits. int32_t const scale = (t < numvals) ? 
col_scale - val_scale : 0; if (scale >= 0) { auto const abs_scale = min(scale, 27); return (v * kPow5i[abs_scale]) << abs_scale; } else { auto const abs_scale = min(-scale, 27); return (v / kPow5i[abs_scale]) >> abs_scale; } }(); if (dtype_id == type_id::DECIMAL32) { vals.i32[t] = scaled_value; } else if (dtype_id == type_id::DECIMAL64) { vals.i64[t] = scaled_value; } else { vals.i128[t] = scaled_value; } } // There is nothing to read, so break if (num_vals_read == num_vals_to_read) break; // Update number of values read (This contains values of previous iteration) num_vals_read = num_vals_to_read; // Have to wait till all threads have copied data __syncthreads(); if (num_vals_read != numvals) { bytestream_fill(bs, t); __syncthreads(); if (t == 0) { // Needs to be reset since bytestream has been filled bs->fill_count = 0; } } // Adding to get all threads in sync before next read __syncthreads(); } return num_vals_read; } /** * @brief Decoding NULLs and builds string dictionary index tables * * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] global_dictionary Global dictionary device array * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] max_num_rows Maximum number of rows to load * @param[in] first_row Crop all rows below first_row */ // blockDim {block_size,1,1} template <int block_size> __global__ void __launch_bounds__(block_size) gpuDecodeNullsAndStringDictionaries(ColumnDesc* chunks, DictionaryEntry* global_dictionary, uint32_t num_columns, uint32_t num_stripes, size_t first_row) { __shared__ __align__(16) orcdec_state_s state_g; using warp_reduce = cub::WarpReduce<uint32_t>; using block_reduce = cub::BlockReduce<uint32_t, block_size>; __shared__ union { typename warp_reduce::TempStorage wr_storage[block_size / 32]; typename block_reduce::TempStorage bk_storage; } temp_storage; orcdec_state_s* const s = &state_g; bool const is_nulldec = (blockIdx.y >= num_stripes); uint32_t const column = blockIdx.x; uint32_t const stripe = (is_nulldec) ? blockIdx.y - num_stripes : blockIdx.y; uint32_t const chunk_id = stripe * num_columns + column; int t = threadIdx.x; if (t == 0) s->chunk = chunks[chunk_id]; __syncthreads(); size_t const max_num_rows = s->chunk.column_num_rows - s->chunk.parent_validity_info.null_count; if (is_nulldec) { uint32_t null_count = 0; // Decode NULLs if (t == 0) { s->chunk.skip_count = 0; s->top.nulls_desc_row = 0; bytestream_init(&s->bs, s->chunk.streams[CI_PRESENT], s->chunk.strm_len[CI_PRESENT]); } __syncthreads(); if (s->chunk.strm_len[CI_PRESENT] == 0) { // No present stream: all rows are valid s->vals.u32[t] = ~0; } auto const prev_parent_null_count = (s->chunk.parent_null_count_prefix_sums != nullptr && stripe > 0) ? s->chunk.parent_null_count_prefix_sums[stripe - 1] : 0; auto const parent_null_count = (s->chunk.parent_null_count_prefix_sums != nullptr) ? 
s->chunk.parent_null_count_prefix_sums[stripe] - prev_parent_null_count : 0; auto const num_elems = s->chunk.num_rows - parent_null_count; while (s->top.nulls_desc_row < num_elems) { uint32_t nrows_max = min(num_elems - s->top.nulls_desc_row, blockDim.x * 32); uint32_t nrows; size_t row_in; bytestream_fill(&s->bs, t); __syncthreads(); if (s->chunk.strm_len[CI_PRESENT] > 0) { uint32_t nbytes = Byte_RLE(&s->bs, &s->u.rle8, s->vals.u8, (nrows_max + 7) >> 3, t); nrows = min(nrows_max, nbytes * 8u); if (!nrows) { // Error: mark all remaining rows as null nrows = nrows_max; if (t * 32 < nrows) { s->vals.u32[t] = 0; } } } else { nrows = nrows_max; } __syncthreads(); row_in = s->chunk.start_row + s->top.nulls_desc_row - prev_parent_null_count; if (row_in + nrows > first_row && row_in < first_row + max_num_rows && s->chunk.valid_map_base != nullptr) { int64_t dst_row = row_in - first_row; int64_t dst_pos = max(dst_row, (int64_t)0); uint32_t startbit = -static_cast<int32_t>(min(dst_row, (int64_t)0)); uint32_t nbits = nrows - min(startbit, nrows); uint32_t* valid = s->chunk.valid_map_base + (dst_pos >> 5); uint32_t bitpos = static_cast<uint32_t>(dst_pos) & 0x1f; if ((size_t)(dst_pos + nbits) > max_num_rows) { nbits = static_cast<uint32_t>(max_num_rows - min((size_t)dst_pos, max_num_rows)); } // Store bits up to the next 32-bit aligned boundary if (bitpos != 0) { uint32_t n = min(32u - bitpos, nbits); if (t == 0) { uint32_t mask = ((1 << n) - 1) << bitpos; uint32_t bits = (rle8_read_bool32(s->vals.u32, startbit) << bitpos) & mask; atomicAnd(valid, ~mask); atomicOr(valid, bits); null_count += __popc((~bits) & mask); } nbits -= n; startbit += n; valid++; } // Store bits aligned if (t * 32 + 32 <= nbits) { uint32_t bits = rle8_read_bool32(s->vals.u32, startbit + t * 32); valid[t] = bits; null_count += __popc(~bits); } else if (t * 32 < nbits) { uint32_t n = nbits - t * 32; uint32_t mask = (1 << n) - 1; uint32_t bits = rle8_read_bool32(s->vals.u32, startbit + t * 32) & mask; atomicAnd(valid + t, ~mask); atomicOr(valid + t, bits); null_count += __popc((~bits) & mask); } __syncthreads(); } // We may have some valid values that are not decoded below first_row -> count these in // skip_count, so that subsequent kernel can infer the correct row position if (row_in < first_row && t < 32) { uint32_t skippedrows = min(static_cast<uint32_t>(first_row - row_in), nrows); uint32_t skip_count = 0; for (thread_index_type i = t * 32; i < skippedrows; i += 32 * 32) { // Need to arrange the bytes to apply mask properly. uint32_t bits = (i + 32 <= skippedrows) ? 
s->vals.u32[i >> 5] : (__byte_perm(s->vals.u32[i >> 5], 0, 0x0123) & (0xffff'ffffu << (0x20 - skippedrows + i))); skip_count += __popc(bits); } skip_count = warp_reduce(temp_storage.wr_storage[t / 32]).Sum(skip_count); if (t == 0) { s->chunk.skip_count += skip_count; } } __syncthreads(); if (t == 0) { s->top.nulls_desc_row += nrows; } __syncthreads(); } __syncthreads(); // Sum up the per-thread null counts across the block null_count = block_reduce(temp_storage.bk_storage).Sum(null_count); if (t == 0) { chunks[chunk_id].null_count = parent_null_count + null_count; chunks[chunk_id].skip_count = s->chunk.skip_count; } } else { // Decode string dictionary int encoding_kind = s->chunk.encoding_kind; if ((encoding_kind == DICTIONARY || encoding_kind == DICTIONARY_V2) && (s->chunk.dict_len > 0)) { if (t == 0) { s->top.dict.dict_len = s->chunk.dict_len; s->top.dict.local_dict = global_dictionary + s->chunk.dictionary_start; // Local dictionary s->top.dict.dict_pos = 0; // CI_DATA2 contains the LENGTH stream coding the length of individual dictionary entries bytestream_init(&s->bs, s->chunk.streams[CI_DATA2], s->chunk.strm_len[CI_DATA2]); } __syncthreads(); while (s->top.dict.dict_len > 0) { uint32_t numvals = min(s->top.dict.dict_len, blockDim.x), len; uint32_t* vals = s->vals.u32; bytestream_fill(&s->bs, t); __syncthreads(); if (is_rlev1(s->chunk.encoding_kind)) { numvals = Integer_RLEv1(&s->bs, &s->u.rlev1, vals, numvals, t); } else // RLEv2 { numvals = Integer_RLEv2(&s->bs, &s->u.rlev2, vals, numvals, t); } __syncthreads(); len = (t < numvals) ? vals[t] : 0; lengths_to_positions(vals, numvals, t); __syncthreads(); if (numvals == 0) { // This is an error (ran out of data) numvals = min(s->top.dict.dict_len, blockDim.x); vals[t] = 0; } if (t < numvals) { s->top.dict.local_dict[t] = {s->top.dict.dict_pos + vals[t] - len, len}; } __syncthreads(); if (t == 0) { s->top.dict.dict_pos += vals[numvals - 1]; s->top.dict.dict_len -= numvals; s->top.dict.local_dict += numvals; } __syncthreads(); } } } } /** * @brief Decode row positions from valid bits * * @param[in,out] s Column chunk decoder state * @param[in] first_row crop all rows below first_row * @param[in] t thread id * @param[in] temp_storage shared memory storage to perform block reduce */ template <typename Storage> static __device__ void DecodeRowPositions(orcdec_state_s* s, size_t first_row, int t, Storage& temp_storage) { using block_reduce = cub::BlockReduce<uint32_t, block_size>; if (t == 0) { if (s->chunk.skip_count != 0) { s->u.rowdec.nz_count = min(min(s->chunk.skip_count, s->top.data.max_vals), blockDim.x); s->chunk.skip_count -= s->u.rowdec.nz_count; s->top.data.nrows = s->u.rowdec.nz_count; } else { s->u.rowdec.nz_count = 0; } } __syncthreads(); if (t < s->u.rowdec.nz_count) { s->u.rowdec.row[t] = 0; // Skipped values (below first_row) } while (s->u.rowdec.nz_count < s->top.data.max_vals && s->top.data.cur_row + s->top.data.nrows < s->top.data.end_row) { uint32_t nrows = min(s->top.data.end_row - (s->top.data.cur_row + s->top.data.nrows), min((row_decoder_buffer_size - s->u.rowdec.nz_count) * 2, blockDim.x)); if (s->chunk.valid_map_base != nullptr) { // We have a present stream uint32_t rmax = s->top.data.end_row - min((uint32_t)first_row, s->top.data.end_row); auto r = (uint32_t)(s->top.data.cur_row + s->top.data.nrows + t - first_row); uint32_t valid = (t < nrows && r < rmax) ?
(((uint8_t const*)s->chunk.valid_map_base)[r >> 3] >> (r & 7)) & 1 : 0; auto* row_ofs_plus1 = (uint16_t*)&s->u.rowdec.row[s->u.rowdec.nz_count]; uint32_t nz_pos, row_plus1, nz_count = s->u.rowdec.nz_count, last_row; if (t < nrows) { row_ofs_plus1[t] = valid; } lengths_to_positions<uint16_t>(row_ofs_plus1, nrows, t); if (t < nrows) { nz_count += row_ofs_plus1[t]; row_plus1 = s->top.data.nrows + t + 1; } else { row_plus1 = 0; } if (t == nrows - 1) { s->u.rowdec.nz_count = min(nz_count, s->top.data.max_vals); } __syncthreads(); // TBD: Brute-forcing this, there might be a more efficient way to find the thread with the // last row last_row = (nz_count == s->u.rowdec.nz_count) ? row_plus1 : 0; last_row = block_reduce(temp_storage).Reduce(last_row, cub::Max()); nz_pos = (valid) ? nz_count : 0; if (t == 0) { s->top.data.nrows = last_row; } if (valid && nz_pos - 1 < s->u.rowdec.nz_count) { s->u.rowdec.row[nz_pos - 1] = row_plus1; } __syncthreads(); } else { // All values are valid nrows = min(nrows, s->top.data.max_vals - s->u.rowdec.nz_count); if (t < nrows) { s->u.rowdec.row[s->u.rowdec.nz_count + t] = s->top.data.nrows + t + 1; } __syncthreads(); if (t == 0) { s->top.data.nrows += nrows; s->u.rowdec.nz_count += nrows; } __syncthreads(); } } } /** * @brief Trailing zeroes for decoding timestamp nanoseconds */ static const __device__ __constant__ uint32_t kTimestampNanoScale[8] = { 1, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000}; /** * @brief Decodes column data * * @param[in] chunks ColumnDesc device array * @param[in] global_dictionary Global dictionary device array * @param[in] tz_table Timezone translation table * @param[in] row_groups Optional row index data * @param[in] first_row Crop all rows below first_row * @param[in] rowidx_stride Row index stride * @param[in] level nesting level being processed */ // blockDim {block_size,1,1} template <int block_size> __global__ void __launch_bounds__(block_size) gpuDecodeOrcColumnData(ColumnDesc* chunks, DictionaryEntry* global_dictionary, table_device_view tz_table, device_2dspan<RowGroup> row_groups, size_t first_row, uint32_t rowidx_stride, size_t level, size_type* error_count) { __shared__ __align__(16) orcdec_state_s state_g; using block_reduce = cub::BlockReduce<uint64_t, block_size>; __shared__ union { typename cub::BlockReduce<uint32_t, block_size>::TempStorage blk_uint32; typename cub::BlockReduce<uint64_t, block_size>::TempStorage blk_uint64; } temp_storage; orcdec_state_s* const s = &state_g; uint32_t chunk_id; int t = threadIdx.x; auto num_rowgroups = row_groups.size().first; if (num_rowgroups > 0) { if (t == 0) { s->top.data.index = row_groups[blockIdx.y][blockIdx.x]; } __syncthreads(); chunk_id = s->top.data.index.chunk_id; } else { chunk_id = blockIdx.x; } if (t == 0) { s->chunk = chunks[chunk_id]; s->num_child_rows = 0; } __syncthreads(); // Struct doesn't have any data in itself, so skip bool const is_valid = s->chunk.type_kind != STRUCT; size_t const max_num_rows = s->chunk.column_num_rows; if (t == 0 and is_valid) { // If we have an index, seek to the initial run and update row positions if (num_rowgroups > 0) { if (s->top.data.index.strm_offset[0] > s->chunk.strm_len[CI_DATA]) { atomicAdd(error_count, 1); } if (s->top.data.index.strm_offset[1] > s->chunk.strm_len[CI_DATA2]) { atomicAdd(error_count, 1); } uint32_t ofs0 = min(s->top.data.index.strm_offset[0], s->chunk.strm_len[CI_DATA]); uint32_t ofs1 = min(s->top.data.index.strm_offset[1], s->chunk.strm_len[CI_DATA2]); uint32_t rowgroup_rowofs = (level == 0) ? 
(blockIdx.y - min(s->chunk.rowgroup_id, blockIdx.y)) * rowidx_stride : s->top.data.index.start_row; s->chunk.streams[CI_DATA] += ofs0; s->chunk.strm_len[CI_DATA] -= ofs0; s->chunk.streams[CI_DATA2] += ofs1; s->chunk.strm_len[CI_DATA2] -= ofs1; rowgroup_rowofs = min(rowgroup_rowofs, s->chunk.num_rows); s->chunk.start_row += rowgroup_rowofs; s->chunk.num_rows -= rowgroup_rowofs; } s->is_string = (s->chunk.type_kind == STRING || s->chunk.type_kind == BINARY || s->chunk.type_kind == VARCHAR || s->chunk.type_kind == CHAR); s->top.data.cur_row = max(s->chunk.start_row, max((int32_t)(first_row - s->chunk.skip_count), 0)); s->top.data.end_row = s->chunk.start_row + s->chunk.num_rows; s->top.data.buffered_count = 0; if (s->top.data.end_row > first_row + max_num_rows) { s->top.data.end_row = first_row + max_num_rows; } if (num_rowgroups > 0) { s->top.data.end_row = min(s->top.data.end_row, s->chunk.start_row + s->top.data.index.num_rows); } if (!is_dictionary(s->chunk.encoding_kind)) { s->chunk.dictionary_start = 0; } static constexpr duration_s d_orc_utc_epoch = duration_s{orc_utc_epoch}; s->top.data.tz_epoch = d_orc_utc_epoch - get_ut_offset(tz_table, timestamp_s{d_orc_utc_epoch}); bytestream_init(&s->bs, s->chunk.streams[CI_DATA], s->chunk.strm_len[CI_DATA]); bytestream_init(&s->bs2, s->chunk.streams[CI_DATA2], s->chunk.strm_len[CI_DATA2]); } __syncthreads(); while (is_valid && (s->top.data.cur_row < s->top.data.end_row)) { uint32_t list_child_elements = 0; bytestream_fill(&s->bs, t); bytestream_fill(&s->bs2, t); __syncthreads(); if (t == 0) { uint32_t max_vals = s->chunk.start_row + s->chunk.num_rows - s->top.data.cur_row; if (num_rowgroups > 0 && (s->is_string || s->chunk.type_kind == TIMESTAMP)) { max_vals += s->top.data.index.run_pos[is_dictionary(s->chunk.encoding_kind) ? CI_DATA : CI_DATA2]; } s->bs.fill_count = 0; s->bs2.fill_count = 0; s->top.data.nrows = 0; s->top.data.max_vals = min(max_vals, (s->chunk.type_kind == BOOLEAN) ? blockDim.x * 2 : blockDim.x); } __syncthreads(); // Decode data streams { uint32_t numvals = s->top.data.max_vals; uint64_t secondary_val = 0; uint32_t vals_skipped = 0; if (s->is_string || s->chunk.type_kind == TIMESTAMP) { // For these data types, we have a secondary unsigned 32-bit data stream orc_bytestream_s* bs = (is_dictionary(s->chunk.encoding_kind)) ? &s->bs : &s->bs2; uint32_t ofs = 0; if (s->chunk.type_kind == TIMESTAMP) { // Restore buffered secondary stream values, if any ofs = s->top.data.buffered_count; if (ofs > 0) { __syncthreads(); if (t == 0) { s->top.data.buffered_count = 0; } } } if (numvals > ofs) { if (is_rlev1(s->chunk.encoding_kind)) { if (s->chunk.type_kind == TIMESTAMP) numvals = ofs + Integer_RLEv1(bs, &s->u.rlev1, &s->vals.u64[ofs], numvals - ofs, t); else numvals = ofs + Integer_RLEv1(bs, &s->u.rlev1, &s->vals.u32[ofs], numvals - ofs, t); } else { if (s->chunk.type_kind == TIMESTAMP) numvals = ofs + Integer_RLEv2(bs, &s->u.rlev2, &s->vals.u64[ofs], numvals - ofs, t, ofs > 0); else numvals = ofs + Integer_RLEv2(bs, &s->u.rlev2, &s->vals.u32[ofs], numvals - ofs, t, ofs > 0); } __syncthreads(); if (numvals <= ofs && t >= ofs && t < s->top.data.max_vals) { s->vals.u32[t] = 0; } } // If we're using an index, we may have to drop values from the initial run if (num_rowgroups > 0) { int cid = is_dictionary(s->chunk.encoding_kind) ?
CI_DATA : CI_DATA2; uint32_t run_pos = s->top.data.index.run_pos[cid]; if (run_pos) { vals_skipped = min(numvals, run_pos); __syncthreads(); if (t == 0) { s->top.data.index.run_pos[cid] = 0; } numvals -= vals_skipped; if (t < numvals) { secondary_val = (s->chunk.type_kind == TIMESTAMP) ? s->vals.u64[vals_skipped + t] : s->vals.u32[vals_skipped + t]; } __syncthreads(); if (t < numvals) { if (s->chunk.type_kind == TIMESTAMP) s->vals.u64[t] = secondary_val; else s->vals.u32[t] = secondary_val; } } } __syncthreads(); // For strings with direct encoding, we need to convert the lengths into an offset if (!is_dictionary(s->chunk.encoding_kind)) { if (t < numvals) secondary_val = (s->chunk.type_kind == TIMESTAMP) ? s->vals.u64[t] : s->vals.u32[t]; if (s->chunk.type_kind != TIMESTAMP) { lengths_to_positions(s->vals.u32, numvals, t); __syncthreads(); } } // Adjust the maximum number of values if (numvals == 0 && vals_skipped == 0) { numvals = s->top.data.max_vals; // Just so that we don't hang if the stream is corrupted } if (t == 0 && numvals < s->top.data.max_vals) { s->top.data.max_vals = numvals; } } __syncthreads(); // Account for skipped values if (num_rowgroups > 0 && !s->is_string) { uint32_t run_pos = (s->chunk.type_kind == DECIMAL || s->chunk.type_kind == LIST || s->chunk.type_kind == MAP) ? s->top.data.index.run_pos[CI_DATA2] : s->top.data.index.run_pos[CI_DATA]; numvals = min(numvals + run_pos, (s->chunk.type_kind == BOOLEAN) ? blockDim.x * 2 : blockDim.x); } // Decode the primary data stream if (s->chunk.type_kind == INT || s->chunk.type_kind == DATE || s->chunk.type_kind == SHORT) { // Signed int32 primary data stream if (is_rlev1(s->chunk.encoding_kind)) { numvals = Integer_RLEv1(&s->bs, &s->u.rlev1, s->vals.i32, numvals, t); } else { numvals = Integer_RLEv2(&s->bs, &s->u.rlev2, s->vals.i32, numvals, t); } __syncthreads(); } else if (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP) { if (is_rlev1(s->chunk.encoding_kind)) { numvals = Integer_RLEv1<uint64_t>(&s->bs2, &s->u.rlev1, s->vals.u64, numvals, t); } else { numvals = Integer_RLEv2<uint64_t>(&s->bs2, &s->u.rlev2, s->vals.u64, numvals, t); } __syncthreads(); } else if (s->chunk.type_kind == BYTE) { numvals = Byte_RLE(&s->bs, &s->u.rle8, s->vals.u8, numvals, t); __syncthreads(); } else if (s->chunk.type_kind == BOOLEAN) { int n = ((numvals + 7) >> 3); if (n > s->top.data.buffered_count) { numvals = Byte_RLE(&s->bs, &s->u.rle8, &s->vals.u8[s->top.data.buffered_count], n - s->top.data.buffered_count, t) + s->top.data.buffered_count; } else { numvals = s->top.data.buffered_count; } __syncthreads(); if (t == 0) { s->top.data.buffered_count = 0; s->top.data.max_vals = min(s->top.data.max_vals, blockDim.x); } __syncthreads(); // If the condition is false, then it means that s->top.data.max_vals is last set of values. // And as numvals is considered to be min(`max_vals+s->top.data.index.run_pos[CI_DATA]`, // blockDim.x*2) we have to return numvals >= s->top.data.index.run_pos[CI_DATA]. auto const is_last_set = (s->top.data.max_vals >= s->top.data.index.run_pos[CI_DATA]); auto const max_vals = (is_last_set ? s->top.data.max_vals + 7 : blockDim.x) / 8; n = numvals - max_vals; if (t < n) { secondary_val = s->vals.u8[max_vals + t]; if (t == 0) { s->top.data.buffered_count = n; } } numvals = min(numvals * 8, is_last_set ? (s->top.data.max_vals + 7) & (~0x7) : blockDim.x); } else if (s->chunk.type_kind == LONG || s->chunk.type_kind == TIMESTAMP || s->chunk.type_kind == DECIMAL) { orc_bytestream_s* bs = (s->chunk.type_kind == DECIMAL) ? 
&s->bs2 : &s->bs; if (is_rlev1(s->chunk.encoding_kind)) { numvals = Integer_RLEv1<int64_t>(bs, &s->u.rlev1, s->vals.i64, numvals, t); } else { numvals = Integer_RLEv2<int64_t>(bs, &s->u.rlev2, s->vals.i64, numvals, t); } if (s->chunk.type_kind == DECIMAL) { // If we're using an index, we may have to drop values from the initial run uint32_t skip = 0; int val_scale; if (num_rowgroups > 0) { uint32_t run_pos = s->top.data.index.run_pos[CI_DATA2]; if (run_pos) { skip = min(numvals, run_pos); __syncthreads(); if (t == 0) { s->top.data.index.run_pos[CI_DATA2] = 0; } numvals -= skip; } } val_scale = (t < numvals) ? (int)s->vals.i64[skip + t] : 0; __syncthreads(); numvals = Decode_Decimals(&s->bs, &s->u.rle8, s->vals, val_scale, numvals, s->chunk.dtype_id, s->chunk.decimal_scale, t); } __syncthreads(); } else if (s->chunk.type_kind == FLOAT) { numvals = min(numvals, (bytestream_buffer_size - 8u) >> 2); if (t < numvals) { s->vals.u32[t] = bytestream_readu32(&s->bs, s->bs.pos + t * 4); } __syncthreads(); if (t == 0) { bytestream_flush_bytes(&s->bs, numvals * 4); } __syncthreads(); } else if (s->chunk.type_kind == DOUBLE) { numvals = min(numvals, (bytestream_buffer_size - 8u) >> 3); if (t < numvals) { s->vals.u64[t] = bytestream_readu64(&s->bs, s->bs.pos + t * 8); } __syncthreads(); if (t == 0) { bytestream_flush_bytes(&s->bs, numvals * 8); } __syncthreads(); } __syncthreads(); if (numvals == 0 && vals_skipped != 0 && num_rowgroups > 0) { // Special case if the secondary streams produced fewer values than the primary stream's RLE // run, as a result of initial RLE run offset: keep vals_skipped as non-zero to ensure // proper buffered_count/max_vals update below. } else { vals_skipped = 0; if (num_rowgroups > 0) { uint32_t run_pos = (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP) ? s->top.data.index.run_pos[CI_DATA2] : s->top.data.index.run_pos[CI_DATA]; if (run_pos) { vals_skipped = min(numvals, run_pos); numvals -= vals_skipped; __syncthreads(); if (t == 0) { (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP) ? 
s->top.data.index.run_pos[CI_DATA2] = 0 : s->top.data.index.run_pos[CI_DATA] = 0; } } } if (t == 0 && numvals + vals_skipped > 0) { auto const max_vals = s->top.data.max_vals; if (max_vals > numvals) { if (s->chunk.type_kind == TIMESTAMP) { s->top.data.buffered_count = max_vals - numvals; } s->top.data.max_vals = numvals; } } __syncthreads(); // Use the valid bits to compute non-null row positions until we get a full batch of values to // decode DecodeRowPositions(s, first_row, t, temp_storage.blk_uint32); if (!s->top.data.nrows && !s->u.rowdec.nz_count && !vals_skipped) { // This is a bug (could happen with bitstream errors with a bad run that would produce more // values than the number of remaining rows) return; } // Store decoded values to output if (t < min(min(s->top.data.max_vals, s->u.rowdec.nz_count), s->top.data.nrows) && s->u.rowdec.row[t] != 0 && s->top.data.cur_row + s->u.rowdec.row[t] - 1 < s->top.data.end_row) { size_t row = s->top.data.cur_row + s->u.rowdec.row[t] - 1 - first_row; if (row < max_num_rows) { void* data_out = s->chunk.column_data_base; switch (s->chunk.type_kind) { case FLOAT: case INT: static_cast<uint32_t*>(data_out)[row] = s->vals.u32[t + vals_skipped]; break; case DOUBLE: case LONG: static_cast<uint64_t*>(data_out)[row] = s->vals.u64[t + vals_skipped]; break; case DECIMAL: if (s->chunk.dtype_id == type_id::DECIMAL32) { static_cast<uint32_t*>(data_out)[row] = s->vals.u32[t + vals_skipped]; } else if (s->chunk.dtype_id == type_id::DECIMAL64) { static_cast<uint64_t*>(data_out)[row] = s->vals.u64[t + vals_skipped]; } else { // decimal128 static_cast<__uint128_t*>(data_out)[row] = s->vals.u128[t + vals_skipped]; } break; case MAP: case LIST: { // Since the offsets column in cudf is 32-bit (`size_type`), a child-element count that // exceeds its range cannot be represented; assert instead of silently storing a wrapped value.
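// Illustration of the guarded encoding (a sketch with hypothetical numbers, not decoder
// state): cudf list/map columns turn per-row child counts into prefix-summed offsets, e.g.
//   child counts per row : {2, 0, 3}
//   offsets column       : {0, 2, 2, 5}
// so every count read here must itself fit in `size_type`, which the assert below verifies.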
cudf_assert( (s->vals.u64[t + vals_skipped] <= std::numeric_limits<size_type>::max()) and "Number of elements is more than what size_type can handle"); list_child_elements = s->vals.u64[t + vals_skipped]; static_cast<uint32_t*>(data_out)[row] = list_child_elements; } break; case SHORT: static_cast<uint16_t*>(data_out)[row] = static_cast<uint16_t>(s->vals.u32[t + vals_skipped]); break; case BYTE: static_cast<uint8_t*>(data_out)[row] = s->vals.u8[t + vals_skipped]; break; case BOOLEAN: static_cast<uint8_t*>(data_out)[row] = (s->vals.u8[(t + vals_skipped) >> 3] >> ((~(t + vals_skipped)) & 7)) & 1; break; case DATE: if (s->chunk.dtype_len == 8) { cudf::duration_D days{s->vals.i32[t + vals_skipped]}; // Convert from days to milliseconds static_cast<int64_t*>(data_out)[row] = cuda::std::chrono::duration_cast<cudf::duration_ms>(days).count(); } else { static_cast<uint32_t*>(data_out)[row] = s->vals.u32[t + vals_skipped]; } break; case STRING: case BINARY: case VARCHAR: case CHAR: { string_index_pair* strdesc = &static_cast<string_index_pair*>(data_out)[row]; void const* ptr = nullptr; uint32_t count = 0; if (is_dictionary(s->chunk.encoding_kind)) { auto const dict_idx = s->vals.u32[t + vals_skipped]; if (dict_idx < s->chunk.dict_len) { auto const& g_entry = global_dictionary[s->chunk.dictionary_start + dict_idx]; ptr = s->chunk.streams[CI_DICTIONARY] + g_entry.pos; count = g_entry.len; } } else { auto const dict_idx = s->chunk.dictionary_start + s->vals.u32[t + vals_skipped] - secondary_val; if (dict_idx + count <= s->chunk.strm_len[CI_DATA]) { ptr = s->chunk.streams[CI_DATA] + dict_idx; count = secondary_val; } } strdesc->first = static_cast<char const*>(ptr); strdesc->second = count; break; } case TIMESTAMP: { auto seconds = s->top.data.tz_epoch + duration_s{s->vals.i64[t + vals_skipped]}; // Convert to UTC seconds += get_ut_offset(tz_table, timestamp_s{seconds}); duration_ns nanos = duration_ns{(static_cast<int64_t>(secondary_val) >> 3) * kTimestampNanoScale[secondary_val & 7]}; // Adjust seconds only for negative timestamps with positive nanoseconds. // An alternative way to represent negative timestamps is with negative nanoseconds, // in which case the adjustment is not needed. // Comparing with 999999 instead of zero to match the apache writer.
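// Worked example (hypothetical values): an instant 1.7s before the epoch is written as
// seconds = -1 (truncated toward zero) and nanos = 300'000'000. Naively combining them
// gives -1s + 0.3s = -0.7s; subtracting one second first yields -2s + 0.3s = -1.7s,
// the intended instant.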
if (seconds.count() < 0 and nanos.count() > 999999) { seconds -= duration_s{1}; } static_cast<int64_t*>(data_out)[row] = [&]() { using cuda::std::chrono::duration_cast; switch (s->chunk.timestamp_type_id) { case type_id::TIMESTAMP_SECONDS: return (seconds + duration_cast<duration_s>(nanos)).count(); case type_id::TIMESTAMP_MILLISECONDS: return (seconds + duration_cast<duration_ms>(nanos)).count(); case type_id::TIMESTAMP_MICROSECONDS: return (seconds + duration_cast<duration_us>(nanos)).count(); case type_id::TIMESTAMP_NANOSECONDS: default: // nanoseconds as output in case of `type_id::EMPTY` and // `type_id::TIMESTAMP_NANOSECONDS` return (seconds + nanos).count(); } }(); break; } } } } // Aggregate num of elements for the chunk if (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP) { list_child_elements = block_reduce(temp_storage.blk_uint64).Sum(list_child_elements); } __syncthreads(); // Buffer secondary stream values if (s->chunk.type_kind == TIMESTAMP) { int buffer_pos = s->top.data.max_vals; if (t >= buffer_pos && t < buffer_pos + s->top.data.buffered_count) { s->vals.u64[t - buffer_pos] = secondary_val; } } else if (s->chunk.type_kind == BOOLEAN && t < s->top.data.buffered_count) { s->vals.u8[t] = secondary_val; } } __syncthreads(); if (t == 0) { s->top.data.cur_row += s->top.data.nrows; if (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP) { s->num_child_rows += list_child_elements; } if (s->is_string && !is_dictionary(s->chunk.encoding_kind) && s->top.data.max_vals > 0) { s->chunk.dictionary_start += s->vals.u32[s->top.data.max_vals - 1]; } } __syncthreads(); } if (t == 0 and (s->chunk.type_kind == LIST or s->chunk.type_kind == MAP)) { if (num_rowgroups > 0) { row_groups[blockIdx.y][blockIdx.x].num_child_rows = s->num_child_rows; } atomicAdd(&chunks[chunk_id].num_child_rows, s->num_child_rows); } } /** * @brief Launches kernel for decoding NULLs and building string dictionary index tables * * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] global_dictionary Global dictionary device array * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] first_row Crop all rows below first_row * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void __host__ DecodeNullsAndStringDictionaries(ColumnDesc* chunks, DictionaryEntry* global_dictionary, uint32_t num_columns, uint32_t num_stripes, size_t first_row, rmm::cuda_stream_view stream) { dim3 dim_block(block_size, 1); dim3 dim_grid(num_columns, num_stripes * 2); // 1024 threads per chunk gpuDecodeNullsAndStringDictionaries<block_size><<<dim_grid, dim_block, 0, stream.value()>>>( chunks, global_dictionary, num_columns, num_stripes, first_row); } /** * @brief Launches kernel for decoding column data * * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] global_dictionary Global dictionary device array * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] first_row Crop all rows below first_row * @param[in] tz_table Timezone translation table * @param[in] row_groups Optional row index data [row_group][column] * @param[in] num_rowgroups Number of row groups in row index data * @param[in] rowidx_stride Row index stride * @param[in] level nesting level being processed * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void __host__ DecodeOrcColumnData(ColumnDesc* chunks, DictionaryEntry* global_dictionary, device_2dspan<RowGroup> 
row_groups, uint32_t num_columns, uint32_t num_stripes, size_t first_row, table_device_view tz_table, uint32_t num_rowgroups, uint32_t rowidx_stride, size_t level, size_type* error_count, rmm::cuda_stream_view stream) { uint32_t num_chunks = num_columns * num_stripes; dim3 dim_block(block_size, 1); // 1024 threads per chunk dim3 dim_grid((num_rowgroups > 0) ? num_columns : num_chunks, (num_rowgroups > 0) ? num_rowgroups : 1); gpuDecodeOrcColumnData<block_size><<<dim_grid, dim_block, 0, stream.value()>>>( chunks, global_dictionary, tz_table, row_groups, first_row, rowidx_stride, level, error_count); } } // namespace gpu } // namespace orc } // namespace io } // namespace cudf
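// A minimal host-side usage sketch (illustrative only; the real call sites live in the ORC
// reader implementation, and every device pointer below is assumed to have been populated
// by the stripe-init kernels):
//
//   using namespace cudf::io::orc::gpu;
//   DecodeNullsAndStringDictionaries(chunks, dictionary, num_columns, num_stripes,
//                                    first_row, stream);
//   DecodeOrcColumnData(chunks, dictionary, row_groups, num_columns, num_stripes,
//                       first_row, tz_table, num_rowgroups, rowidx_stride,
//                       /*level=*/0, error_count, stream);
//
// Null masks and skip counts must be finalized by the first launch before column data is
// decoded, which is why the two kernels are launched separately on the same stream.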
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/orc.cpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc.hpp" #include "orc_field_reader.hpp" #include "orc_field_writer.hpp" #include <cudf/lists/lists_column_view.hpp> #include <thrust/tabulate.h> #include <string> namespace cudf::io::orc { namespace { [[nodiscard]] constexpr uint32_t varint_size(uint64_t val) { auto len = 1u; while (val > 0x7f) { val >>= 7; ++len; } return len; } } // namespace uint32_t ProtobufReader::read_field_size(uint8_t const* end) { auto const size = get<uint32_t>(); CUDF_EXPECTS(size <= static_cast<uint32_t>(end - m_cur), "Protobuf parsing out of bounds"); return size; } void ProtobufReader::skip_struct_field(int t) { switch (t) { case ProtofType::VARINT: get<uint32_t>(); break; case ProtofType::FIXED64: skip_bytes(8); break; case ProtofType::FIXEDLEN: skip_bytes(get<uint32_t>()); break; case ProtofType::FIXED32: skip_bytes(4); break; default: break; } } void ProtobufReader::read(PostScript& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.footerLength), field_reader(2, s.compression), field_reader(3, s.compressionBlockSize), packed_field_reader(4, s.version), field_reader(5, s.metadataLength), field_reader(8000, s.magic)); function_builder(s, maxlen, op); } void ProtobufReader::read(FileFooter& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.headerLength), field_reader(2, s.contentLength), field_reader(3, s.stripes), field_reader(4, s.types), field_reader(5, s.metadata), field_reader(6, s.numberOfRows), raw_field_reader(7, s.statistics), field_reader(8, s.rowIndexStride)); function_builder(s, maxlen, op); } void ProtobufReader::read(StripeInformation& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.offset), field_reader(2, s.indexLength), field_reader(3, s.dataLength), field_reader(4, s.footerLength), field_reader(5, s.numberOfRows)); function_builder(s, maxlen, op); } void ProtobufReader::read(SchemaType& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.kind), packed_field_reader(2, s.subtypes), field_reader(3, s.fieldNames), field_reader(4, s.maximumLength), field_reader(5, s.precision), field_reader(6, s.scale)); function_builder(s, maxlen, op); } void ProtobufReader::read(UserMetadataItem& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.name), field_reader(2, s.value)); function_builder(s, maxlen, op); } void ProtobufReader::read(StripeFooter& s, size_t maxlen) { auto op = std::tuple( field_reader(1, s.streams), field_reader(2, s.columns), field_reader(3, s.writerTimezone)); function_builder(s, maxlen, op); } void ProtobufReader::read(Stream& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.kind), field_reader(2, s.column_id), field_reader(3, s.length)); function_builder(s, maxlen, op); } void ProtobufReader::read(ColumnEncoding& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.kind), field_reader(2, s.dictionarySize)); function_builder(s, maxlen, op); } void ProtobufReader::read(integer_statistics& s, size_t maxlen) { auto op = 
std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum), field_reader(3, s.sum)); function_builder(s, maxlen, op); } void ProtobufReader::read(double_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum), field_reader(3, s.sum)); function_builder(s, maxlen, op); } void ProtobufReader::read(string_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum), field_reader(3, s.sum)); function_builder(s, maxlen, op); } void ProtobufReader::read(bucket_statistics& s, size_t maxlen) { auto op = std::tuple(packed_field_reader(1, s.count)); function_builder(s, maxlen, op); } void ProtobufReader::read(decimal_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum), field_reader(3, s.sum)); function_builder(s, maxlen, op); } void ProtobufReader::read(date_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum)); function_builder(s, maxlen, op); } void ProtobufReader::read(binary_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.sum)); function_builder(s, maxlen, op); } void ProtobufReader::read(timestamp_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.minimum), field_reader(2, s.maximum), field_reader(3, s.minimum_utc), field_reader(4, s.maximum_utc), field_reader(5, s.minimum_nanos), field_reader(6, s.maximum_nanos)); function_builder(s, maxlen, op); // Adjust nanoseconds because they are encoded as (value + 1) // Range [1, 1000'000] is translated here to [0, 999'999] if (s.minimum_nanos.has_value()) { auto& min_nanos = s.minimum_nanos.value(); CUDF_EXPECTS(min_nanos >= 1 and min_nanos <= 1000'000, "Invalid minimum nanoseconds"); --min_nanos; } if (s.maximum_nanos.has_value()) { auto& max_nanos = s.maximum_nanos.value(); CUDF_EXPECTS(max_nanos >= 1 and max_nanos <= 1000'000, "Invalid maximum nanoseconds"); --max_nanos; } } void ProtobufReader::read(column_statistics& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.number_of_values), field_reader(2, s.int_stats), field_reader(3, s.double_stats), field_reader(4, s.string_stats), field_reader(5, s.bucket_stats), field_reader(6, s.decimal_stats), field_reader(7, s.date_stats), field_reader(8, s.binary_stats), field_reader(9, s.timestamp_stats), field_reader(10, s.has_null)); function_builder(s, maxlen, op); } void ProtobufReader::read(StripeStatistics& s, size_t maxlen) { auto op = std::tuple(raw_field_reader(1, s.colStats)); function_builder(s, maxlen, op); } void ProtobufReader::read(Metadata& s, size_t maxlen) { auto op = std::tuple(field_reader(1, s.stripeStats)); function_builder(s, maxlen, op); } /** * @brief Add a single rowIndexEntry, negative input values treated as not present */ void ProtobufWriter::put_row_index_entry(int32_t present_blk, int32_t present_ofs, int32_t data_blk, int32_t data_ofs, int32_t data2_blk, int32_t data2_ofs, TypeKind kind, ColStatsBlob const* stats) { ProtobufWriter position_writer; auto const positions_size_offset = position_writer.put_uint( encode_field_number(1, ProtofType::FIXEDLEN)); // 1:positions[packed=true] position_writer.put_byte(0xcd); // positions size placeholder uint32_t positions_size = 0; if (present_blk >= 0) positions_size += position_writer.put_uint(present_blk); if (present_ofs >= 0) { positions_size += position_writer.put_uint(present_ofs); positions_size += position_writer.put_byte(0); // run pos = 0 positions_size += 
position_writer.put_byte(0); // bit pos = 0 } if (data_blk >= 0) { positions_size += position_writer.put_uint(data_blk); } if (data_ofs >= 0) { positions_size += position_writer.put_uint(data_ofs); if (kind != STRING && kind != FLOAT && kind != DOUBLE && kind != DECIMAL) { // RLE run pos always zero (assumes RLE aligned with row index boundaries) positions_size += position_writer.put_byte(0); if (kind == BOOLEAN) { // bit position in byte, always zero positions_size += position_writer.put_byte(0); } } } // INT kind can be passed in to bypass 2nd stream index (dictionary length streams) if (kind != INT) { if (data2_blk >= 0) { positions_size += position_writer.put_uint(data2_blk); } if (data2_ofs >= 0) { positions_size += position_writer.put_uint(data2_ofs); // RLE run pos always zero (assumes RLE aligned with row index boundaries) positions_size += position_writer.put_byte(0); } } // size of the field 1 position_writer.buffer()[positions_size_offset] = static_cast<uint8_t>(positions_size); auto const stats_size = (stats == nullptr) ? 0 : varint_size(encode_field_number<decltype(*stats)>(2)) + varint_size(stats->size()) + stats->size(); auto const entry_size = position_writer.size() + stats_size; // 1:RowIndex.entry put_uint(encode_field_number(1, ProtofType::FIXEDLEN)); put_uint(entry_size); put_bytes<uint8_t>(position_writer.buffer()); if (stats != nullptr) { put_uint(encode_field_number<decltype(*stats)>(2)); // 2: statistics // Statistics field contains its length as varint and dtype specific data (encoded on the GPU) put_uint(stats->size()); put_bytes<typename ColStatsBlob::value_type>(*stats); } } size_t ProtobufWriter::write(PostScript const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.footerLength); w.field_uint(2, s.compression); if (s.compression != NONE) { w.field_uint(3, s.compressionBlockSize); } w.field_packed_uint(4, s.version); w.field_uint(5, s.metadataLength); w.field_blob(8000, s.magic); return w.value(); } size_t ProtobufWriter::write(FileFooter const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.headerLength); w.field_uint(2, s.contentLength); w.field_repeated_struct(3, s.stripes); w.field_repeated_struct(4, s.types); w.field_repeated_struct(5, s.metadata); w.field_uint(6, s.numberOfRows); w.field_repeated_struct_blob(7, s.statistics); w.field_uint(8, s.rowIndexStride); return w.value(); } size_t ProtobufWriter::write(StripeInformation const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.offset); w.field_uint(2, s.indexLength); w.field_uint(3, s.dataLength); w.field_uint(4, s.footerLength); w.field_uint(5, s.numberOfRows); return w.value(); } size_t ProtobufWriter::write(SchemaType const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.kind); w.field_packed_uint(2, s.subtypes); w.field_repeated_string(3, s.fieldNames); // w.field_uint(4, s.maximumLength); if (s.precision) w.field_uint(5, *s.precision); if (s.scale) w.field_uint(6, *s.scale); return w.value(); } size_t ProtobufWriter::write(UserMetadataItem const& s) { ProtobufFieldWriter w(this); w.field_blob(1, s.name); w.field_blob(2, s.value); return w.value(); } size_t ProtobufWriter::write(StripeFooter const& s) { ProtobufFieldWriter w(this); w.field_repeated_struct(1, s.streams); w.field_repeated_struct(2, s.columns); if (s.writerTimezone != "") { w.field_blob(3, s.writerTimezone); } return w.value(); } size_t ProtobufWriter::write(Stream const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.kind); if (s.column_id) w.field_uint(2, *s.column_id); w.field_uint(3, s.length); return 
w.value(); } size_t ProtobufWriter::write(ColumnEncoding const& s) { ProtobufFieldWriter w(this); w.field_uint(1, s.kind); if (s.kind == DICTIONARY || s.kind == DICTIONARY_V2) { w.field_uint(2, s.dictionarySize); } return w.value(); } size_t ProtobufWriter::write(StripeStatistics const& s) { ProtobufFieldWriter w(this); w.field_repeated_struct_blob(1, s.colStats); return w.value(); } size_t ProtobufWriter::write(Metadata const& s) { ProtobufFieldWriter w(this); w.field_repeated_struct(1, s.stripeStats); return w.value(); } OrcDecompressor::OrcDecompressor(CompressionKind kind, uint32_t blockSize) : m_blockSize(blockSize) { switch (kind) { case NONE: _compression = compression_type::NONE; m_log2MaxRatio = 0; break; case ZLIB: _compression = compression_type::ZLIB; m_log2MaxRatio = 11; // < 2048:1 break; case SNAPPY: _compression = compression_type::SNAPPY; m_log2MaxRatio = 5; // < 32:1 break; case LZO: _compression = compression_type::LZO; break; case LZ4: _compression = compression_type::LZ4; break; case ZSTD: m_log2MaxRatio = 15; _compression = compression_type::ZSTD; break; default: CUDF_FAIL("Invalid compression type"); } } host_span<uint8_t const> OrcDecompressor::decompress_blocks(host_span<uint8_t const> src, rmm::cuda_stream_view stream) { // If uncompressed, just pass-through the input if (src.empty() or _compression == compression_type::NONE) { return src; } constexpr size_t header_size = 3; CUDF_EXPECTS(src.size() >= header_size, "Total size is less than the 3-byte header"); // First, scan the input for the number of blocks and worst-case output size size_t max_dst_length = 0; for (size_t i = 0; i + header_size < src.size();) { uint32_t block_len = src[i] | (src[i + 1] << 8) | (src[i + 2] << 16); auto const is_uncompressed = static_cast<bool>(block_len & 1); i += header_size; block_len >>= 1; if (is_uncompressed) { // Uncompressed block max_dst_length += block_len; } else { max_dst_length += m_blockSize; } i += block_len; CUDF_EXPECTS(i <= src.size() and block_len <= m_blockSize, "Error in decompression"); } // Check if we have a single uncompressed block, or no blocks if (max_dst_length < m_blockSize) { return src.subspan(header_size, src.size() - header_size); } m_buf.resize(max_dst_length); size_t dst_length = 0; for (size_t i = 0; i + header_size < src.size();) { uint32_t block_len = src[i] | (src[i + 1] << 8) | (src[i + 2] << 16); auto const is_uncompressed = static_cast<bool>(block_len & 1); i += header_size; block_len >>= 1; if (is_uncompressed) { // Uncompressed block memcpy(m_buf.data() + dst_length, src.data() + i, block_len); dst_length += block_len; } else { // Compressed block dst_length += decompress( _compression, src.subspan(i, block_len), {m_buf.data() + dst_length, m_blockSize}, stream); } i += block_len; } m_buf.resize(dst_length); return m_buf; } metadata::metadata(datasource* const src, rmm::cuda_stream_view stream) : source(src) { auto const len = source->size(); auto const max_ps_size = std::min(len, static_cast<size_t>(256)); // Read uncompressed postscript section (max 255 bytes + 1 byte for length) auto buffer = source->host_read(len - max_ps_size, max_ps_size); size_t const ps_length = buffer->data()[max_ps_size - 1]; uint8_t const* ps_data = &buffer->data()[max_ps_size - ps_length - 1]; ProtobufReader(ps_data, ps_length).read(ps); CUDF_EXPECTS(ps.footerLength + ps_length < len, "Invalid footer length"); // If compression is used, the rest of the metadata is compressed // If no compression is used, the decompressor is simply a pass-through decompressor =
std::make_unique<OrcDecompressor>(ps.compression, ps.compressionBlockSize); // Read compressed filefooter section buffer = source->host_read(len - ps_length - 1 - ps.footerLength, ps.footerLength); auto const ff_data = decompressor->decompress_blocks({buffer->data(), buffer->size()}, stream); ProtobufReader(ff_data.data(), ff_data.size()).read(ff); CUDF_EXPECTS(get_num_columns() > 0, "No columns found"); // Read compressed metadata section buffer = source->host_read(len - ps_length - 1 - ps.footerLength - ps.metadataLength, ps.metadataLength); auto const md_data = decompressor->decompress_blocks({buffer->data(), buffer->size()}, stream); orc::ProtobufReader(md_data.data(), md_data.size()).read(md); init_parent_descriptors(); init_column_names(); } void metadata::init_column_names() { column_names.resize(get_num_columns()); thrust::tabulate(column_names.begin(), column_names.end(), [&](auto col_id) { if (not column_has_parent(col_id)) return std::string{}; auto const& parent_field_names = ff.types[parent_id(col_id)].fieldNames; if (field_index(col_id) < static_cast<size_type>(parent_field_names.size())) { return parent_field_names[field_index(col_id)]; } // Generate names for list and map child columns if (ff.types[parent_id(col_id)].subtypes.size() == 1) { return std::to_string(lists_column_view::child_column_index); } else { return std::to_string(field_index(col_id)); } }); column_paths.resize(get_num_columns()); thrust::tabulate(column_paths.begin(), column_paths.end(), [&](auto col_id) { if (not column_has_parent(col_id)) return std::string{}; // Don't include ORC root column name in path return (parent_id(col_id) == 0 ? "" : column_paths[parent_id(col_id)] + ".") + column_names[col_id]; }); } void metadata::init_parent_descriptors() { auto const num_columns = static_cast<size_type>(ff.types.size()); parents.resize(num_columns); for (size_type col_id = 0; col_id < num_columns; ++col_id) { auto const& subtypes = ff.types[col_id].subtypes; auto const num_children = static_cast<size_type>(subtypes.size()); for (size_type field_idx = 0; field_idx < num_children; ++field_idx) { auto const child_id = static_cast<size_type>(subtypes[field_idx]); CUDF_EXPECTS(child_id > col_id && child_id < num_columns, "Invalid column id"); CUDF_EXPECTS(not column_has_parent(child_id), "Same node referenced twice"); parents[child_id] = {col_id, field_idx}; } } } } // namespace cudf::io::orc
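// A small illustration of the base-128 varint layout assumed throughout this file
// (sketch only): each byte carries 7 payload bits, least-significant group first,
// with the high bit set on every byte except the last.
//
//   300 = 0b1'0010'1100
//   low 7 bits  0b010'1100 -> 0xAC (continuation bit set)
//   remaining   0b10       -> 0x02
//
// Hence 300 encodes as {0xAC, 0x02}, in agreement with varint_size(300) == 2.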
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/stripe_init.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc_gpu.hpp" #include <cudf/io/orc_types.hpp> #include <io/utilities/block_utils.cuh> #include <cub/cub.cuh> #include <rmm/cuda_stream_view.hpp> #include <thrust/copy.h> #include <thrust/execution_policy.h> namespace cudf { namespace io { namespace orc { namespace gpu { struct comp_in_out { uint8_t const* in_ptr{}; size_t in_size{}; uint8_t* out_ptr{}; size_t out_size{}; }; struct compressed_stream_s { CompressedStreamInfo info{}; comp_in_out ctl{}; }; // blockDim {128,1,1} __global__ void __launch_bounds__(128, 8) gpuParseCompressedStripeData( CompressedStreamInfo* strm_info, int32_t num_streams, uint32_t block_size, uint32_t log2maxcr) { __shared__ compressed_stream_s strm_g[4]; compressed_stream_s* const s = &strm_g[threadIdx.x / 32]; int strm_id = blockIdx.x * 4 + (threadIdx.x / 32); int lane_id = threadIdx.x % 32; if (strm_id < num_streams && lane_id == 0) { s->info = strm_info[strm_id]; } __syncthreads(); if (strm_id < num_streams) { // Walk through the compressed blocks uint8_t const* cur = s->info.compressed_data; uint8_t const* end = cur + s->info.compressed_data_size; uint8_t* uncompressed = s->info.uncompressed_data; size_t max_uncompressed_size = 0; uint32_t max_uncompressed_block_size = 0; uint32_t num_compressed_blocks = 0; uint32_t num_uncompressed_blocks = 0; while (cur + block_header_size < end) { uint32_t block_len = shuffle((lane_id == 0) ? cur[0] | (cur[1] << 8) | (cur[2] << 16) : 0); auto const is_uncompressed = static_cast<bool>(block_len & 1); uint32_t uncompressed_size; device_span<uint8_t const>* init_in_ctl = nullptr; device_span<uint8_t>* init_out_ctl = nullptr; block_len >>= 1; cur += block_header_size; if (block_len > block_size || cur + block_len > end) { // Fatal num_compressed_blocks = 0; max_uncompressed_size = 0; max_uncompressed_block_size = 0; break; } // TBD: For some codecs like snappy, it wouldn't be too difficult to get the actual // uncompressed size and avoid waste due to block size alignment For now, rely on the max // compression ratio to limit waste for the most extreme cases (small single-block streams) uncompressed_size = (is_uncompressed) ? block_len : (block_len < (block_size >> log2maxcr)) ? block_len << log2maxcr : block_size; if (is_uncompressed) { if (uncompressed_size <= 32) { // For short blocks, copy the uncompressed data to output if (uncompressed && max_uncompressed_size + uncompressed_size <= s->info.max_uncompressed_size && lane_id < uncompressed_size) { uncompressed[max_uncompressed_size + lane_id] = cur[lane_id]; } } else { init_in_ctl = (s->info.copy_in_ctl && num_uncompressed_blocks < s->info.num_uncompressed_blocks) ? &s->info.copy_in_ctl[num_uncompressed_blocks] : nullptr; init_out_ctl = (s->info.copy_out_ctl && num_uncompressed_blocks < s->info.num_uncompressed_blocks) ? 
&s->info.copy_out_ctl[num_uncompressed_blocks] : nullptr; num_uncompressed_blocks++; } } else { init_in_ctl = (s->info.dec_in_ctl && num_compressed_blocks < s->info.num_compressed_blocks) ? &s->info.dec_in_ctl[num_compressed_blocks] : nullptr; init_out_ctl = (s->info.dec_out_ctl && num_compressed_blocks < s->info.num_compressed_blocks) ? &s->info.dec_out_ctl[num_compressed_blocks] : nullptr; num_compressed_blocks++; } if (!lane_id && init_in_ctl) { s->ctl = {cur, block_len, uncompressed + max_uncompressed_size, uncompressed_size}; } __syncwarp(); if (init_in_ctl && lane_id == 0) { *init_in_ctl = {s->ctl.in_ptr, s->ctl.in_size}; *init_out_ctl = {s->ctl.out_ptr, s->ctl.out_size}; } cur += block_len; max_uncompressed_size += uncompressed_size; max_uncompressed_block_size = max(max_uncompressed_block_size, uncompressed_size); } __syncwarp(); if (!lane_id) { s->info.num_compressed_blocks = num_compressed_blocks; s->info.num_uncompressed_blocks = num_uncompressed_blocks; s->info.max_uncompressed_size = max_uncompressed_size; s->info.max_uncompressed_block_size = max_uncompressed_block_size; } } __syncthreads(); if (strm_id < num_streams && lane_id == 0) strm_info[strm_id] = s->info; } // blockDim {128,1,1} __global__ void __launch_bounds__(128, 8) gpuPostDecompressionReassemble(CompressedStreamInfo* strm_info, int32_t num_streams) { __shared__ compressed_stream_s strm_g[4]; compressed_stream_s* const s = &strm_g[threadIdx.x / 32]; int strm_id = blockIdx.x * 4 + (threadIdx.x / 32); int lane_id = threadIdx.x % 32; if (strm_id < num_streams && lane_id == 0) s->info = strm_info[strm_id]; __syncthreads(); if (strm_id < num_streams && s->info.num_compressed_blocks + s->info.num_uncompressed_blocks > 0 && s->info.max_uncompressed_size > 0) { // Walk through the compressed blocks uint8_t const* cur = s->info.compressed_data; uint8_t const* end = cur + s->info.compressed_data_size; auto dec_out = s->info.dec_out_ctl; auto dec_result = s->info.dec_res; uint8_t* uncompressed_actual = s->info.uncompressed_data; uint8_t* uncompressed_estimated = uncompressed_actual; uint32_t num_compressed_blocks = 0; uint32_t max_compressed_blocks = s->info.num_compressed_blocks; while (cur + block_header_size < end) { uint32_t block_len = shuffle((lane_id == 0) ? cur[0] | (cur[1] << 8) | (cur[2] << 16) : 0); auto const is_uncompressed = static_cast<bool>(block_len & 1); uint32_t uncompressed_size_est, uncompressed_size_actual; block_len >>= 1; cur += block_header_size; if (cur + block_len > end) { break; } if (is_uncompressed) { uncompressed_size_est = block_len; uncompressed_size_actual = block_len; } else { if (num_compressed_blocks > max_compressed_blocks) { break; } uint32_t const dst_size = dec_out[num_compressed_blocks].size(); uncompressed_size_est = shuffle((lane_id == 0) ? dst_size : 0); uint32_t const bytes_written = dec_result[num_compressed_blocks].bytes_written; uncompressed_size_actual = shuffle((lane_id == 0) ? 
bytes_written : 0); } // In practice, this should never happen with a well-behaved writer, as we would expect the // uncompressed size to always be equal to the compression block size except for the last // block if (uncompressed_actual < uncompressed_estimated) { // warp-level memmove for (int i = lane_id; i < (int)uncompressed_size_actual; i += 32) { uncompressed_actual[i] = uncompressed_estimated[i]; } } cur += block_len; num_compressed_blocks += 1 - is_uncompressed; uncompressed_estimated += uncompressed_size_est; uncompressed_actual += uncompressed_size_actual; } // Update info with actual uncompressed size if (!lane_id) { size_t total_uncompressed_size = uncompressed_actual - s->info.uncompressed_data; // Set uncompressed size to zero if there were any errors strm_info[strm_id].max_uncompressed_size = (num_compressed_blocks == s->info.num_compressed_blocks) ? total_uncompressed_size : 0; } } } /** * @brief Shared mem state for gpuParseRowGroupIndex */ struct rowindex_state_s { ColumnDesc chunk{}; uint32_t rowgroup_start{}; uint32_t rowgroup_end{}; int is_compressed{}; uint32_t row_index_entry[3] [CI_PRESENT]{}; // NOTE: Assumes CI_PRESENT follows CI_DATA and CI_DATA2 CompressedStreamInfo strm_info[2]{}; RowGroup rowgroups[128]{}; uint32_t compressed_offset[128][2]{}; }; enum row_entry_state_e { NOT_FOUND = 0, GET_LENGTH, SKIP_VARINT, SKIP_FIXEDLEN, STORE_INDEX0, STORE_INDEX1, STORE_INDEX2, }; /** * @brief Calculates the order of index streams based on the index types present in the column. * * @param index_types_bitmap The bitmap of index types showing which index streams are present * * @return The order of index streams */ static auto __device__ index_order_from_index_types(uint32_t index_types_bitmap) { constexpr std::array full_order = {CI_PRESENT, CI_DATA, CI_DATA2}; std::array<uint32_t, full_order.size()> partial_order; thrust::copy_if(thrust::seq, full_order.cbegin(), full_order.cend(), partial_order.begin(), [index_types_bitmap] __device__(auto index_type) { // Check if the index type is present return index_types_bitmap & (1 << index_type); }); return partial_order; } /** * @brief Decode a single row group index entry * * @param[in,out] s row group index state * @param[in] start start position in byte stream * @param[in] end end of byte stream * @return bytes consumed */ static uint32_t __device__ ProtobufParseRowIndexEntry(rowindex_state_s* s, uint8_t const* const start, uint8_t const* const end) { constexpr uint32_t pb_rowindexentry_id = ProtofType::FIXEDLEN + 8; auto const stream_order = index_order_from_index_types(s->chunk.skip_count); uint8_t const* cur = start; row_entry_state_e state = NOT_FOUND; uint32_t length = 0; uint32_t idx_id = 0; uint32_t pos_end = 0; uint32_t ci_id = CI_NUM_STREAMS; while (cur < end) { uint32_t v = 0; for (uint32_t l = 0; l <= 28; l += 7) { uint32_t c = (cur < end) ? 
*cur++ : 0; v |= (c & 0x7f) << l; if (c <= 0x7f) break; } switch (state) { case NOT_FOUND: if (v == pb_rowindexentry_id) { state = GET_LENGTH; } else { v &= 7; if (v == ProtofType::FIXED64) cur += 8; else if (v == ProtofType::FIXED32) cur += 4; else if (v == ProtofType::VARINT) state = SKIP_VARINT; else if (v == ProtofType::FIXEDLEN) state = SKIP_FIXEDLEN; } break; case SKIP_VARINT: state = NOT_FOUND; break; case SKIP_FIXEDLEN: cur += v; state = NOT_FOUND; break; case GET_LENGTH: if (length == 0) { length = (uint32_t)(cur + v - start); state = NOT_FOUND; // Scan for positions (same field id & low-level type as RowIndexEntry // entry) } else { pos_end = min((uint32_t)(cur + v - start), length); state = STORE_INDEX0; } break; case STORE_INDEX0: // Start of a new entry; determine the stream index types ci_id = stream_order[idx_id++]; if (s->is_compressed) { if (ci_id < CI_PRESENT) s->row_index_entry[0][ci_id] = v; if (cur >= start + pos_end) return length; state = STORE_INDEX1; break; } else { if (ci_id < CI_PRESENT) s->row_index_entry[0][ci_id] = 0; // Fall through to STORE_INDEX1 for uncompressed (always block0) } case STORE_INDEX1: if (ci_id < CI_PRESENT) s->row_index_entry[1][ci_id] = v; if (cur >= start + pos_end) return length; state = (ci_id == CI_DATA && s->chunk.encoding_kind != DICTIONARY && s->chunk.encoding_kind != DICTIONARY_V2 && (s->chunk.type_kind == STRING || s->chunk.type_kind == BINARY || s->chunk.type_kind == VARCHAR || s->chunk.type_kind == CHAR || s->chunk.type_kind == DECIMAL || s->chunk.type_kind == FLOAT || s->chunk.type_kind == DOUBLE)) ? STORE_INDEX0 : STORE_INDEX2; break; case STORE_INDEX2: if (ci_id < CI_PRESENT) { // Boolean columns have an extra byte to indicate the position of the bit within the byte s->row_index_entry[2][ci_id] = (s->chunk.type_kind == BOOLEAN) ? (v << 3) + *cur : v; } if (ci_id == CI_PRESENT || s->chunk.type_kind == BOOLEAN) cur++; if (cur >= start + pos_end) return length; state = STORE_INDEX0; break; } } return (uint32_t)(end - start); } /** * @brief Decode row group index entries * * @param[in,out] s row group index state * @param[in] num_rowgroups Number of index entries to read */ static __device__ void gpuReadRowGroupIndexEntries(rowindex_state_s* s, int num_rowgroups) { uint8_t const* index_data = s->chunk.streams[CI_INDEX]; int index_data_len = s->chunk.strm_len[CI_INDEX]; for (int i = 0; i < num_rowgroups; i++) { s->row_index_entry[0][0] = 0; s->row_index_entry[0][1] = 0; s->row_index_entry[1][0] = 0; s->row_index_entry[1][1] = 0; s->row_index_entry[2][0] = 0; s->row_index_entry[2][1] = 0; if (index_data_len > 0) { int len = ProtobufParseRowIndexEntry(s, index_data, index_data + index_data_len); index_data += len; index_data_len = max(index_data_len - len, 0); for (int j = 0; j < 2; j++) { s->rowgroups[i].strm_offset[j] = s->row_index_entry[1][j]; s->rowgroups[i].run_pos[j] = s->row_index_entry[2][j]; s->compressed_offset[i][j] = s->row_index_entry[0][j]; } } } s->chunk.streams[CI_INDEX] = index_data; s->chunk.strm_len[CI_INDEX] = index_data_len; } /** * @brief Translate block+offset compressed position into an uncompressed offset * * @param[in,out] s row group index state * @param[in] ci_id index to convert (CI_DATA or CI_DATA2) * @param[in] num_rowgroups Number of index entries * @param[in] t thread id */ static __device__ void gpuMapRowIndexToUncompressed(rowindex_state_s* s, int ci_id, int num_rowgroups, int t) { int32_t strm_len = s->chunk.strm_len[ci_id]; if (strm_len > 0) { int32_t compressed_offset = (t < num_rowgroups) ? 
s->compressed_offset[t][ci_id] : 0; if (compressed_offset > 0) { uint8_t const* start = s->strm_info[ci_id].compressed_data; uint8_t const* cur = start; uint8_t const* end = cur + s->strm_info[ci_id].compressed_data_size; auto dec_result = s->strm_info[ci_id].dec_res.data(); uint32_t uncomp_offset = 0; for (;;) { uint32_t block_len; if (cur + block_header_size > end || cur + block_header_size >= start + compressed_offset) { break; } block_len = cur[0] | (cur[1] << 8) | (cur[2] << 16); cur += block_header_size; auto const is_uncompressed = static_cast<bool>(block_len & 1); block_len >>= 1; cur += block_len; if (cur > end) { break; } if (is_uncompressed) { uncomp_offset += block_len; } else { uncomp_offset += dec_result->bytes_written; dec_result++; } } s->rowgroups[t].strm_offset[ci_id] += uncomp_offset; } } } /** * @brief Decode index streams * * @param[out] row_groups RowGroup device array [rowgroup][column] * @param[in] strm_info List of compressed streams (or NULL if uncompressed) * @param[in] chunks ColumnDesc device array [stripe][column] * @param[in] num_columns Number of columns * @param[in] num_stripes Number of stripes * @param[in] num_rowgroups Number of row groups * @param[in] rowidx_stride Row index stride * @param[in] use_base_stride Whether to use base stride obtained from meta or use the computed * value */ // blockDim {128,1,1} __global__ void __launch_bounds__(128, 8) gpuParseRowGroupIndex(RowGroup* row_groups, CompressedStreamInfo* strm_info, ColumnDesc* chunks, uint32_t num_columns, uint32_t num_stripes, uint32_t num_rowgroups, uint32_t rowidx_stride, bool use_base_stride) { __shared__ __align__(16) rowindex_state_s state_g; rowindex_state_s* const s = &state_g; uint32_t chunk_id = blockIdx.y * num_columns + blockIdx.x; int t = threadIdx.x; if (t == 0) { s->chunk = chunks[chunk_id]; if (strm_info) { if (s->chunk.strm_len[0] > 0) s->strm_info[0] = strm_info[s->chunk.strm_id[0]]; if (s->chunk.strm_len[1] > 0) s->strm_info[1] = strm_info[s->chunk.strm_id[1]]; } uint32_t rowgroups_in_chunk = s->chunk.num_rowgroups; s->rowgroup_start = s->chunk.rowgroup_id; s->rowgroup_end = s->rowgroup_start + rowgroups_in_chunk; s->is_compressed = (strm_info != nullptr); } __syncthreads(); while (s->rowgroup_start < s->rowgroup_end) { int num_rowgroups = min(s->rowgroup_end - s->rowgroup_start, 128); int rowgroup_size4, t4, t32; s->rowgroups[t].chunk_id = chunk_id; if (t == 0) { gpuReadRowGroupIndexEntries(s, num_rowgroups); } __syncthreads(); if (s->is_compressed) { // Convert the block + blk_offset pair into a raw offset into the decompressed stream if (s->chunk.strm_len[CI_DATA] > 0) { gpuMapRowIndexToUncompressed(s, CI_DATA, num_rowgroups, t); } if (s->chunk.strm_len[CI_DATA2] > 0) { gpuMapRowIndexToUncompressed(s, CI_DATA2, num_rowgroups, t); } __syncthreads(); } rowgroup_size4 = sizeof(RowGroup) / sizeof(uint32_t); t4 = t & 3; t32 = t >> 2; for (int i = t32; i < num_rowgroups; i += 32) { auto const num_rows = (use_base_stride) ? rowidx_stride : row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x].num_rows; auto const start_row = (use_base_stride) ? 
i * rowidx_stride : row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x].start_row; for (int j = t4; j < rowgroup_size4; j += 4) { ((uint32_t*)&row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x])[j] = ((uint32_t*)&s->rowgroups[i])[j]; } row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x].num_rows = num_rows; // Updating in case of struct row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x].num_child_rows = num_rows; row_groups[(s->rowgroup_start + i) * num_columns + blockIdx.x].start_row = start_row; } __syncthreads(); if (t == 0) { s->rowgroup_start += num_rowgroups; } __syncthreads(); } } template <int block_size> __global__ void __launch_bounds__(block_size) gpu_reduce_pushdown_masks(device_span<orc_column_device_view const> orc_columns, device_2dspan<rowgroup_rows const> rowgroup_bounds, device_2dspan<size_type> set_counts) { using BlockReduce = cub::BlockReduce<size_type, block_size>; __shared__ typename BlockReduce::TempStorage temp_storage; auto const column_id = blockIdx.x; auto const rowgroup_id = blockIdx.y; auto const column = orc_columns[column_id]; auto const t = threadIdx.x; auto const use_child_rg = column.type().id() == type_id::LIST; auto const rg = rowgroup_bounds[rowgroup_id][column_id + (use_child_rg ? 1 : 0)]; if (column.pushdown_mask == nullptr) { // All elements are valid if the null mask is not present if (t == 0) { set_counts[rowgroup_id][column_id] = rg.size(); } return; }; size_type count = 0; static constexpr size_type bits_per_word = sizeof(bitmask_type) * 8; for (auto row = t * bits_per_word + rg.begin; row < rg.end; row += block_size * bits_per_word) { auto const begin_bit = row; auto const end_bit = min(static_cast<size_type>(row + bits_per_word), rg.end); auto const mask_len = end_bit - begin_bit; auto const mask_word = cudf::detail::get_mask_offset_word(column.pushdown_mask, 0, row, end_bit) & ((1 << mask_len) - 1); count += __popc(mask_word); } count = BlockReduce(temp_storage).Sum(count); if (t == 0) { set_counts[rowgroup_id][column_id] = count; } } void __host__ ParseCompressedStripeData(CompressedStreamInfo* strm_info, int32_t num_streams, uint32_t compression_block_size, uint32_t log2maxcr, rmm::cuda_stream_view stream) { dim3 dim_block(128, 1); dim3 dim_grid((num_streams + 3) >> 2, 1); // 1 stream per warp, 4 warps per block gpuParseCompressedStripeData<<<dim_grid, dim_block, 0, stream.value()>>>( strm_info, num_streams, compression_block_size, log2maxcr); } void __host__ PostDecompressionReassemble(CompressedStreamInfo* strm_info, int32_t num_streams, rmm::cuda_stream_view stream) { dim3 dim_block(128, 1); dim3 dim_grid((num_streams + 3) >> 2, 1); // 1 stream per warp, 4 warps per block gpuPostDecompressionReassemble<<<dim_grid, dim_block, 0, stream.value()>>>(strm_info, num_streams); } void __host__ ParseRowGroupIndex(RowGroup* row_groups, CompressedStreamInfo* strm_info, ColumnDesc* chunks, uint32_t num_columns, uint32_t num_stripes, uint32_t num_rowgroups, uint32_t rowidx_stride, bool use_base_stride, rmm::cuda_stream_view stream) { dim3 dim_block(128, 1); dim3 dim_grid(num_columns, num_stripes); // 1 column chunk per block gpuParseRowGroupIndex<<<dim_grid, dim_block, 0, stream.value()>>>(row_groups, strm_info, chunks, num_columns, num_stripes, num_rowgroups, rowidx_stride, use_base_stride); } void __host__ reduce_pushdown_masks(device_span<orc_column_device_view const> columns, device_2dspan<rowgroup_rows const> rowgroups, device_2dspan<cudf::size_type> valid_counts, rmm::cuda_stream_view stream) { dim3 
dim_block(128, 1); dim3 dim_grid(columns.size(), rowgroups.size().first); // 1 rowgroup per block gpu_reduce_pushdown_masks<128> <<<dim_grid, dim_block, 0, stream.value()>>>(columns, rowgroups, valid_counts); } } // namespace gpu } // namespace orc } // namespace io } // namespace cudf
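// A minimal, self-contained sketch (an illustrative addition, not part of the cudf sources) of
// the ORC compressed block header decode used by gpuMapRowIndexToUncompressed above: each block
// starts with a 3-byte little-endian header whose least significant bit flags an uncompressed
// ("original") block and whose upper 23 bits hold the block length in bytes.
#include <cstdint>
#include <utility>

// Hypothetical helper name; returns {block_length, is_uncompressed} for one block header.
inline std::pair<uint32_t, bool> decode_orc_block_header(uint8_t const* cur)
{
  uint32_t const header = cur[0] | (cur[1] << 8) | (cur[2] << 16);  // 24-bit little-endian value
  bool const is_uncompressed = (header & 1) != 0;                   // LSB: "original" flag
  return {header >> 1, is_uncompressed};                            // remaining bits: length
}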
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/aggregate_orc_metadata.cpp
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "aggregate_orc_metadata.hpp" #include <io/utilities/row_selection.hpp> #include <algorithm> #include <numeric> #include <optional> namespace cudf::io::orc::detail { column_hierarchy::column_hierarchy(nesting_map child_map) : children{std::move(child_map)} { // Sort columns by nesting levels std::function<void(size_type, int32_t)> levelize = [&](size_type id, int32_t level) { if (static_cast<int32_t>(levels.size()) == level) levels.emplace_back(); levels[level].push_back({id, static_cast<int32_t>(children[id].size())}); for (auto child_id : children[id]) { levelize(child_id, level + 1); } }; std::for_each( children[0].cbegin(), children[0].cend(), [&](auto col_id) { levelize(col_id, 0); }); } namespace { /** * @brief Goes up to the root to include the column with the given id and its parents. */ void update_parent_mapping(std::map<size_type, std::vector<size_type>>& selected_columns, metadata const& metadata, size_type id) { auto current_id = id; while (metadata.column_has_parent(current_id)) { auto parent_id = metadata.parent_id(current_id); if (std::find(selected_columns[parent_id].cbegin(), selected_columns[parent_id].cend(), current_id) == selected_columns[parent_id].end()) { selected_columns[parent_id].push_back(current_id); } current_id = parent_id; } } /** * @brief Adds all columns nested under the column with the given id to the nesting map. */ void add_nested_columns(std::map<size_type, std::vector<size_type>>& selected_columns, std::vector<SchemaType> const& types, size_type id) { for (auto child_id : types[id].subtypes) { if (std::find(selected_columns[id].cbegin(), selected_columns[id].cend(), child_id) == selected_columns[id].end()) { selected_columns[id].push_back(child_id); } add_nested_columns(selected_columns, types, child_id); } } /** * @brief Adds the column with the given id to the mapping * * All nested columns and direct ancestors of column `id` are included. * Columns that are not on the direct path are excluded, which may result in pruning. 
*/ void add_column_to_mapping(std::map<size_type, std::vector<size_type>>& selected_columns, metadata const& metadata, size_type id) { update_parent_mapping(selected_columns, metadata, id); add_nested_columns(selected_columns, metadata.ff.types, id); } /** * @brief Create a metadata object from each element in the source vector */ auto metadatas_from_sources(std::vector<std::unique_ptr<datasource>> const& sources, rmm::cuda_stream_view stream) { std::vector<metadata> metadatas; std::transform( sources.cbegin(), sources.cend(), std::back_inserter(metadatas), [stream](auto const& source) { return metadata(source.get(), stream); }); return metadatas; } } // namespace int64_t aggregate_orc_metadata::calc_num_rows() const { return std::accumulate( per_file_metadata.begin(), per_file_metadata.end(), 0l, [](auto const& sum, auto const& pfm) { return sum + pfm.get_total_rows(); }); } size_type aggregate_orc_metadata::calc_num_stripes() const { return std::accumulate( per_file_metadata.begin(), per_file_metadata.end(), 0, [](auto const& sum, auto const& pfm) { return sum + pfm.get_num_stripes(); }); } aggregate_orc_metadata::aggregate_orc_metadata( std::vector<std::unique_ptr<datasource>> const& sources, rmm::cuda_stream_view stream) : per_file_metadata(metadatas_from_sources(sources, stream)), num_rows(calc_num_rows()), num_stripes(calc_num_stripes()) { // Verify that the input files have the same number of columns, // as well as matching types, compression, and names for (auto const& pfm : per_file_metadata) { CUDF_EXPECTS(per_file_metadata[0].get_num_columns() == pfm.get_num_columns(), "All sources must have the same number of columns"); CUDF_EXPECTS(per_file_metadata[0].ps.compression == pfm.ps.compression, "All sources must have the same compression type"); // Check the types, column names, and decimal scale for (size_t i = 0; i < pfm.ff.types.size(); i++) { CUDF_EXPECTS(pfm.ff.types[i].kind == per_file_metadata[0].ff.types[i].kind, "Column types across all input sources must be the same"); CUDF_EXPECTS(std::equal(pfm.ff.types[i].fieldNames.begin(), pfm.ff.types[i].fieldNames.end(), per_file_metadata[0].ff.types[i].fieldNames.begin()), "All source column names must be the same"); CUDF_EXPECTS( pfm.ff.types[i].scale.value_or(0) == per_file_metadata[0].ff.types[i].scale.value_or(0), "All scale values must be the same"); } } } std::tuple<int64_t, size_type, std::vector<metadata::stripe_source_mapping>> aggregate_orc_metadata::select_stripes( std::vector<std::vector<size_type>> const& user_specified_stripes, uint64_t skip_rows, std::optional<size_type> const& num_rows, rmm::cuda_stream_view stream) { CUDF_EXPECTS((skip_rows == 0 and not num_rows.has_value()) or user_specified_stripes.empty(), "Can't use both the row selection and the stripe selection"); auto [rows_to_skip, rows_to_read] = [&]() { if (not user_specified_stripes.empty()) { return std::pair<uint64_t, size_type>{0, 0}; } return cudf::io::detail::skip_rows_num_rows_from_options(skip_rows, num_rows, get_num_rows()); }(); std::vector<metadata::stripe_source_mapping> selected_stripes_mapping; if (!user_specified_stripes.empty()) { CUDF_EXPECTS(user_specified_stripes.size() == per_file_metadata.size(), "Must specify stripes for each source"); // Each vector entry represents a source file; each nested vector represents the // user_defined_stripes to get from that source file for (size_t src_file_idx = 0; src_file_idx < user_specified_stripes.size(); ++src_file_idx) { std::vector<OrcStripeInfo> stripe_infos; // Coalesce stripe info at the 
source file later since that makes downstream processing much // easier in impl::read for (auto const& stripe_idx : user_specified_stripes[src_file_idx]) { CUDF_EXPECTS( stripe_idx >= 0 and stripe_idx < static_cast<decltype(stripe_idx)>( per_file_metadata[src_file_idx].ff.stripes.size()), "Invalid stripe index"); stripe_infos.push_back( std::pair(&per_file_metadata[src_file_idx].ff.stripes[stripe_idx], nullptr)); rows_to_read += per_file_metadata[src_file_idx].ff.stripes[stripe_idx].numberOfRows; } selected_stripes_mapping.push_back({static_cast<int>(src_file_idx), stripe_infos}); } } else { uint64_t count = 0; size_type stripe_skip_rows = 0; // Iterate all source files; each source file has corresponding metadata for (size_t src_file_idx = 0; src_file_idx < per_file_metadata.size() && count < rows_to_skip + rows_to_read; ++src_file_idx) { std::vector<OrcStripeInfo> stripe_infos; for (size_t stripe_idx = 0; stripe_idx < per_file_metadata[src_file_idx].ff.stripes.size() && count < rows_to_skip + rows_to_read; ++stripe_idx) { count += per_file_metadata[src_file_idx].ff.stripes[stripe_idx].numberOfRows; if (count > rows_to_skip || count == 0) { stripe_infos.push_back( std::pair(&per_file_metadata[src_file_idx].ff.stripes[stripe_idx], nullptr)); } else { stripe_skip_rows = count; } } selected_stripes_mapping.push_back({static_cast<int>(src_file_idx), stripe_infos}); } // Need to remove skipped rows from the stripes which are not selected. rows_to_skip -= stripe_skip_rows; } // Read each stripe's stripefooter metadata if (not selected_stripes_mapping.empty()) { for (auto& mapping : selected_stripes_mapping) { // Resize to all stripe_info for the source level per_file_metadata[mapping.source_idx].stripefooters.resize(mapping.stripe_info.size()); for (size_t i = 0; i < mapping.stripe_info.size(); i++) { auto const stripe = mapping.stripe_info[i].first; auto const sf_comp_offset = stripe->offset + stripe->indexLength + stripe->dataLength; auto const sf_comp_length = stripe->footerLength; CUDF_EXPECTS( sf_comp_offset + sf_comp_length < per_file_metadata[mapping.source_idx].source->size(), "Invalid stripe information"); auto const buffer = per_file_metadata[mapping.source_idx].source->host_read(sf_comp_offset, sf_comp_length); auto sf_data = per_file_metadata[mapping.source_idx].decompressor->decompress_blocks( {buffer->data(), buffer->size()}, stream); ProtobufReader(sf_data.data(), sf_data.size()) .read(per_file_metadata[mapping.source_idx].stripefooters[i]); mapping.stripe_info[i].second = &per_file_metadata[mapping.source_idx].stripefooters[i]; if (stripe->indexLength == 0) { row_grp_idx_present = false; } } } } return {rows_to_skip, rows_to_read, selected_stripes_mapping}; } column_hierarchy aggregate_orc_metadata::select_columns( std::optional<std::vector<std::string>> const& column_paths) const { auto const& pfm = per_file_metadata[0]; column_hierarchy::nesting_map selected_columns; if (not column_paths.has_value()) { for (auto const& col_id : pfm.ff.types[0].subtypes) { add_column_to_mapping(selected_columns, pfm, col_id); } } else { for (auto const& path : column_paths.value()) { bool name_found = false; for (auto col_id = 1; col_id < pfm.get_num_columns(); ++col_id) { if (pfm.column_path(col_id) == path) { name_found = true; add_column_to_mapping(selected_columns, pfm, col_id); break; } } CUDF_EXPECTS(name_found, "Unknown column name: " + std::string(path)); } } return {std::move(selected_columns)}; } } // namespace cudf::io::orc::detail
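// A minimal sketch of the row accounting that select_stripes performs above when stripes are
// picked via skip_rows/num_rows rather than explicit indices: stripes are walked in order,
// stripes lying entirely inside the skipped range are dropped, and the residual skip is charged
// to the first selected stripe. The function and parameter names here are illustrative only.
#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<std::size_t> select_stripes_by_row_range(std::vector<uint64_t> const& stripe_rows,
                                                     uint64_t rows_to_skip,
                                                     uint64_t rows_to_read,
                                                     uint64_t& residual_skip)
{
  std::vector<std::size_t> selected;
  uint64_t count            = 0;  // running total of rows seen so far
  uint64_t stripe_skip_rows = 0;  // rows covered by fully skipped stripes
  for (std::size_t i = 0; i < stripe_rows.size() && count < rows_to_skip + rows_to_read; ++i) {
    count += stripe_rows[i];
    if (count > rows_to_skip) {
      selected.push_back(i);     // stripe overlaps the requested row range
    } else {
      stripe_skip_rows = count;  // stripe ends before the requested range starts
    }
  }
  residual_skip = rows_to_skip - stripe_skip_rows;  // rows still to skip in the first stripe
  return selected;
}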
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/stripe_enc.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc_gpu.hpp" #include <cudf/io/orc_types.hpp> #include <io/comp/nvcomp_adapter.hpp> #include <io/utilities/block_utils.cuh> #include <io/utilities/config_utils.hpp> #include <io/utilities/time_utils.cuh> #include <cudf/column/column_device_view.cuh> #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/lists/lists_column_view.hpp> #include <cudf/utilities/bit.hpp> #include <cub/cub.cuh> #include <rmm/cuda_stream_view.hpp> #include <rmm/exec_policy.hpp> #include <thrust/for_each.h> #include <thrust/iterator/zip_iterator.h> #include <thrust/transform.h> #include <thrust/tuple.h> namespace cudf { namespace io { namespace orc { namespace gpu { using cudf::detail::device_2dspan; constexpr int scratch_buffer_size = 512 * 4; constexpr int compact_streams_block_size = 1024; // Apache ORC reader does not handle zero-length patch lists for RLEv2 mode2 // Workaround replaces zero-length patch lists by a dummy zero patch constexpr bool zero_pll_war = true; struct byterle_enc_state_s { uint32_t literal_run; uint32_t repeat_run; uint32_t rpt_map[(512 / 32) + 1]; }; struct intrle_enc_state_s { uint32_t literal_run; uint32_t delta_run; uint32_t literal_mode; uint32_t literal_w; uint32_t hdr_bytes; uint32_t pl_bytes; uint32_t delta_map[(512 / 32) + 1]; }; struct strdata_enc_state_s { uint32_t char_count; uint32_t lengths_red[(512 / 32)]; char const* str_data[512]; }; struct orcenc_state_s { uint32_t cur_row; // Current row in group uint32_t present_rows; // # of rows in present buffer uint32_t present_out; // # of rows in present buffer that have been flushed uint32_t nrows; // # of rows in current batch uint32_t numvals; // # of non-zero values in current batch (<=nrows) uint32_t numlengths; // # of non-zero values in DATA2 batch uint32_t nnz; // Running count of non-null values encoder_chunk_streams stream; EncChunk chunk; uint32_t strm_pos[CI_NUM_STREAMS]; uint8_t valid_buf[512]; // valid map bits union { byterle_enc_state_s byterle; intrle_enc_state_s intrle; strdata_enc_state_s strenc; stripe_dictionary const* dict_stripe; } u; union { uint8_t u8[scratch_buffer_size]; // gblock_vminscratch buffer uint32_t u32[scratch_buffer_size / 4]; } buf; union { uint8_t u8[2048]; uint32_t u32[1024]; int32_t i32[1024]; uint64_t u64[1024]; int64_t i64[1024]; } vals; union { uint8_t u8[2048]; uint32_t u32[1024]; uint64_t u64[1024]; } lengths; }; static inline __device__ uint32_t zigzag(uint32_t v) { return v; } static inline __device__ uint32_t zigzag(int32_t v) { int32_t s = (v >> 31); return ((v ^ s) * 2) - s; } static inline __device__ uint64_t zigzag(uint64_t v) { return v; } static inline __device__ uint64_t zigzag(int64_t v) { int64_t s = (v < 0) ? 1 : 0; return ((v ^ -s) * 2) + s; } static inline __device__ __uint128_t zigzag(__int128_t v) { int64_t s = (v < 0) ? 
1 : 0; return ((v ^ -s) * 2) + s; } static inline __device__ uint32_t CountLeadingBytes32(uint32_t v) { return __clz(v) >> 3; } static inline __device__ uint32_t CountLeadingBytes64(uint64_t v) { return __clzll(v) >> 3; } /** * @brief Raw data output * * @tparam cid stream type (strm_pos[cid] will be updated and output stored at * streams[cid]+strm_pos[cid]) * @tparam inmask input buffer position mask for circular buffers * @param[in] s encoder state * @param[in] inbuf base input buffer * @param[in] inpos position in input buffer * @param[in] count number of bytes to encode * @param[in] t thread id */ template <StreamIndexType cid, uint32_t inmask> static __device__ void StoreBytes( orcenc_state_s* s, uint8_t const* inbuf, uint32_t inpos, uint32_t count, int t) { uint8_t* dst = s->stream.data_ptrs[cid] + s->strm_pos[cid]; while (count > 0) { uint32_t n = min(count, 512); if (t < n) { dst[t] = inbuf[(inpos + t) & inmask]; } dst += n; inpos += n; count -= n; } __syncthreads(); if (!t) { s->strm_pos[cid] = static_cast<uint32_t>(dst - s->stream.data_ptrs[cid]); } } /** * @brief ByteRLE encoder * * @tparam cid stream type (strm_pos[cid] will be updated and output stored at * streams[cid]+strm_pos[cid]) * @tparam inmask input buffer position mask for circular buffers * @param[in] s encoder state * @param[in] inbuf base input buffer * @param[in] inpos position in input buffer * @param[in] numvals max number of values to encode * @param[in] flush encode all remaining values if nonzero * @param[in] t thread id * * @return number of input values encoded */ template <StreamIndexType cid, uint32_t inmask> static __device__ uint32_t ByteRLE( orcenc_state_s* s, uint8_t const* inbuf, uint32_t inpos, uint32_t numvals, uint32_t flush, int t) { uint8_t* dst = s->stream.data_ptrs[cid] + s->strm_pos[cid]; uint32_t out_cnt = 0; while (numvals > 0) { uint8_t v0 = (t < numvals) ? inbuf[(inpos + t) & inmask] : 0; uint8_t v1 = (t + 1 < numvals) ? inbuf[(inpos + t + 1) & inmask] : 0; uint32_t rpt_map = ballot(t + 1 < numvals && v0 == v1), literal_run, repeat_run, maxvals = min(numvals, 512); if (!(t & 0x1f)) s->u.byterle.rpt_map[t >> 5] = rpt_map; __syncthreads(); if (t == 0) { // Find the start of an identical 3-byte sequence // TBD: The two loops below could be eliminated using more ballot+ffs using warp0 literal_run = 0; repeat_run = 0; while (literal_run < maxvals) { uint32_t next = s->u.byterle.rpt_map[(literal_run >> 5) + 1]; uint32_t mask = rpt_map & __funnelshift_r(rpt_map, next, 1); if (mask) { uint32_t literal_run_ofs = __ffs(mask) - 1; literal_run += literal_run_ofs; repeat_run = __ffs(~((rpt_map >> literal_run_ofs) >> 1)); if (repeat_run + literal_run_ofs == 32) { while (next == ~0) { uint32_t next_idx = ((literal_run + repeat_run) >> 5) + 1; next = (next_idx < 512 / 32) ? s->u.byterle.rpt_map[next_idx] : 0; repeat_run += 32; } repeat_run += __ffs(~next) - 1; } repeat_run = min(repeat_run + 1, maxvals - min(literal_run, maxvals)); if (repeat_run < 3) { literal_run += (flush && literal_run + repeat_run >= numvals) ? repeat_run : 0; repeat_run = 0; } break; } rpt_map = next; literal_run += 32; } if (repeat_run >= 130) { // Limit large runs to multiples of 130 repeat_run = (repeat_run >= 3 * 130) ? 3 * 130 : (repeat_run >= 2 * 130) ? 
2 * 130 : 130; } else if (literal_run && literal_run + repeat_run == maxvals) { repeat_run = 0; // Try again at next iteration } s->u.byterle.repeat_run = repeat_run; s->u.byterle.literal_run = min(literal_run, maxvals); } __syncthreads(); literal_run = s->u.byterle.literal_run; if (!flush && literal_run == numvals) { literal_run &= ~0x7f; if (!literal_run) break; } if (literal_run > 0) { uint32_t num_runs = (literal_run + 0x7f) >> 7; if (t < literal_run) { uint32_t run_id = t >> 7; uint32_t run = min(literal_run - run_id * 128, 128); if (!(t & 0x7f)) dst[run_id + t] = 0x100 - run; dst[run_id + t + 1] = (cid == CI_PRESENT) ? __brev(v0) >> 24 : v0; } dst += num_runs + literal_run; out_cnt += literal_run; numvals -= literal_run; inpos += literal_run; } repeat_run = s->u.byterle.repeat_run; if (repeat_run > 0) { while (repeat_run >= 130) { if (t == literal_run) // repeat_run follows literal_run { dst[0] = 0x7f; dst[1] = (cid == CI_PRESENT) ? __brev(v0) >> 24 : v0; } dst += 2; out_cnt += 130; numvals -= 130; inpos += 130; repeat_run -= 130; } if (!flush && repeat_run == numvals) { // Wait for more data in case we can continue the run later break; } if (repeat_run >= 3) { if (t == literal_run) // repeat_run follows literal_run { dst[0] = repeat_run - 3; dst[1] = (cid == CI_PRESENT) ? __brev(v0) >> 24 : v0; } dst += 2; out_cnt += repeat_run; numvals -= repeat_run; inpos += repeat_run; } } } if (!t) { s->strm_pos[cid] = static_cast<uint32_t>(dst - s->stream.data_ptrs[cid]); } return out_cnt; } /** * @brief Maps the symbol size in bytes to RLEv2 5-bit length code */ static const __device__ __constant__ uint8_t kByteLengthToRLEv2_W[9] = { 0, 7, 15, 23, 27, 28, 29, 30, 31}; /** * @brief Encode a varint value, return the number of bytes written */ static inline __device__ uint32_t StoreVarint(uint8_t* dst, __uint128_t v) { uint32_t bytecnt = 0; for (;;) { auto c = static_cast<uint32_t>(v & 0x7f); v >>= 7u; if (v == 0) { dst[bytecnt++] = c; break; } else { dst[bytecnt++] = c + 0x80; } } return bytecnt; } template <class T> static inline __device__ void StoreBytesBigEndian(uint8_t* dst, T v, uint32_t w) { for (uint32_t i = 0, b = w * 8; i < w; ++i) { b -= 8; dst[i] = static_cast<uint8_t>(v >> b); } } // Combine and store bits for symbol widths less than 8 static inline __device__ void StoreBitsBigEndian( uint8_t* dst, uint32_t v, uint32_t w, int num_vals, int t) { if (t <= (num_vals | 0x1f)) { uint32_t mask; if (w <= 1) { v = (v << 1) | (shuffle_xor(v, 1) & 0x1); v = (v << 2) | (shuffle_xor(v, 2) & 0x3); v = (v << 4) | (shuffle_xor(v, 4) & 0xf); mask = 0x7; } else if (w <= 2) { v = (v << 2) | (shuffle_xor(v, 1) & 0x3); v = (v << 4) | (shuffle_xor(v, 2) & 0xf); mask = 0x3; } else // if (w <= 4) { v = (v << 4) | (shuffle_xor(v, 1) & 0xf); mask = 0x1; } if (t < num_vals && !(t & mask)) { dst[(t * w) >> 3] = static_cast<uint8_t>(v); } } } /** * @brief Integer RLEv2 encoder * * @tparam cid stream type (strm_pos[cid] will be updated and output stored at * streams[cid]+strm_pos[cid]) * @tparam inmask input buffer position mask for circular buffers * @param[in] s encoder state * @param[in] inbuf base input buffer * @param[in] inpos position in input buffer * @param[in] numvals max number of values to encode * @param[in] flush encode all remaining values if nonzero * @param[in] t thread id * @param[in] temp_storage shared memory storage to perform block reduce * * @return number of input values encoded */ template <StreamIndexType cid, class T, bool is_signed, uint32_t inmask, int block_size, typename Storage> 
static __device__ uint32_t IntegerRLE( orcenc_state_s* s, T const* inbuf, uint32_t inpos, uint32_t numvals, int t, Storage& temp_storage) { using block_reduce = cub::BlockReduce<T, block_size>; uint8_t* dst = s->stream.data_ptrs[cid] + s->strm_pos[cid]; uint32_t out_cnt = 0; __shared__ uint64_t block_vmin; while (numvals > 0) { T v0 = (t < numvals) ? inbuf[(inpos + t) & inmask] : 0; T v1 = (t + 1 < numvals) ? inbuf[(inpos + t + 1) & inmask] : 0; T v2 = (t + 2 < numvals) ? inbuf[(inpos + t + 2) & inmask] : 0; uint32_t delta_map = ballot(t + 2 < numvals && v1 - v0 == v2 - v1), maxvals = min(numvals, 512), literal_run, delta_run; if (!(t & 0x1f)) s->u.intrle.delta_map[t >> 5] = delta_map; __syncthreads(); if (!t) { // Find the start of the next delta run (2 consecutive values with the same delta) literal_run = delta_run = 0; while (literal_run < maxvals) { if (delta_map != 0) { uint32_t literal_run_ofs = __ffs(delta_map) - 1; literal_run += literal_run_ofs; delta_run = __ffs(~((delta_map >> literal_run_ofs) >> 1)); if (literal_run_ofs + delta_run == 32) { for (;;) { uint32_t delta_idx = (literal_run + delta_run) >> 5; delta_map = (delta_idx < 512 / 32) ? s->u.intrle.delta_map[delta_idx] : 0; if (delta_map != ~0) break; delta_run += 32; } delta_run += __ffs(~delta_map) - 1; } delta_run += 2; break; } literal_run += 32; delta_map = s->u.intrle.delta_map[(literal_run >> 5)]; } literal_run = min(literal_run, maxvals); s->u.intrle.literal_run = literal_run; s->u.intrle.delta_run = min(delta_run, maxvals - literal_run); } __syncthreads(); literal_run = s->u.intrle.literal_run; // Find minimum and maximum values if (literal_run > 0) { // Find min & max T vmin = (t < literal_run) ? v0 : std::numeric_limits<T>::max(); T vmax = (t < literal_run) ? v0 : std::numeric_limits<T>::min(); uint32_t literal_mode, literal_w; vmin = block_reduce(temp_storage).Reduce(vmin, cub::Min()); __syncthreads(); vmax = block_reduce(temp_storage).Reduce(vmax, cub::Max()); if (t == 0) { uint32_t mode1_w, mode2_w; typename std::make_unsigned<T>::type vrange_mode1, vrange_mode2; block_vmin = static_cast<uint64_t>(vmin); if constexpr (sizeof(T) > 4) { vrange_mode1 = (is_signed) ? max(zigzag(vmin), zigzag(vmax)) : vmax; vrange_mode2 = vmax - vmin; mode1_w = 8 - min(CountLeadingBytes64(vrange_mode1), 7); mode2_w = 8 - min(CountLeadingBytes64(vrange_mode2), 7); } else { vrange_mode1 = (is_signed) ? max(zigzag(vmin), zigzag(vmax)) : vmax; vrange_mode2 = vmax - vmin; mode1_w = 4 - min(CountLeadingBytes32(vrange_mode1), 3); mode2_w = 4 - min(CountLeadingBytes32(vrange_mode2), 3); } // Decide between mode1 & mode2 (also mode3 for length=2 repeat) if (vrange_mode2 == 0 && mode1_w > 1) { // Should only occur if literal_run==2 (otherwise would have resulted in repeat_run >= // 3) uint32_t bytecnt = 2; dst[0] = 0xC0 + ((literal_run - 1) >> 8); dst[1] = (literal_run - 1) & 0xff; bytecnt += StoreVarint(dst + 2, vrange_mode1); dst[bytecnt++] = 0; // Zero delta s->u.intrle.literal_mode = 3; s->u.intrle.literal_w = bytecnt; } else { uint32_t range, w; // Mode 2 base value cannot be bigger than max int64_t, i.e. the first bit has to be 0 if (vmin <= std::numeric_limits<int64_t>::max() and mode1_w > mode2_w and (literal_run - 1) * (mode1_w - mode2_w) > 4) { s->u.intrle.literal_mode = 2; w = mode2_w; range = (uint32_t)vrange_mode2; } else { s->u.intrle.literal_mode = 1; w = mode1_w; range = (uint32_t)vrange_mode1; } if (w == 1) w = (range >= 16) ? w << 3 : (range >= 4) ? 4 : (range >= 2) ? 
2 : 1; else w <<= 3; // bytes -> bits s->u.intrle.literal_w = w; } } __syncthreads(); vmin = static_cast<T>(block_vmin); literal_mode = s->u.intrle.literal_mode; literal_w = s->u.intrle.literal_w; if (literal_mode == 1) { // Direct mode if (!t) { dst[0] = 0x40 + ((literal_w < 8) ? literal_w - 1 : kByteLengthToRLEv2_W[literal_w >> 3]) * 2 + ((literal_run - 1) >> 8); dst[1] = (literal_run - 1) & 0xff; } dst += 2; typename std::make_unsigned<T>::type zzv0 = v0; if (t < literal_run) { zzv0 = zigzag(v0); } if (literal_w < 8) { StoreBitsBigEndian(dst, zzv0, literal_w, literal_run, t); } else if (t < literal_run) { StoreBytesBigEndian(dst + t * (literal_w >> 3), zzv0, (literal_w >> 3)); } } else if (literal_mode == 2) { // Patched base mode if (!t) { uint32_t bw, pw = 1, pll, pgw = 1, bv_scale = (is_signed) ? 0 : 1; vmax = (is_signed) ? ((vmin < 0) ? -vmin : vmin) * 2 : vmin; bw = (sizeof(T) > 4) ? (8 - min(CountLeadingBytes64(vmax << bv_scale), 7)) : (4 - min(CountLeadingBytes32(vmax << bv_scale), 3)); if (zero_pll_war) { // Insert a dummy zero patch pll = 1; dst[4 + bw + ((literal_run * literal_w + 7) >> 3) + 0] = 0; dst[4 + bw + ((literal_run * literal_w + 7) >> 3) + 1] = 0; } else { pll = 0; } dst[0] = 0x80 + ((literal_w < 8) ? literal_w - 1 : kByteLengthToRLEv2_W[literal_w >> 3]) * 2 + ((literal_run - 1) >> 8); dst[1] = (literal_run - 1) & 0xff; dst[2] = ((bw - 1) << 5) | kByteLengthToRLEv2_W[pw]; dst[3] = ((pgw - 1) << 5) | pll; if (is_signed) { vmax >>= 1; vmax |= vmin & ((T)1 << (bw * 8 - 1)); } StoreBytesBigEndian(dst + 4, vmax, bw); s->u.intrle.hdr_bytes = 4 + bw; s->u.intrle.pl_bytes = (pll * (pw * 8 + pgw) + 7) >> 3; } __syncthreads(); dst += s->u.intrle.hdr_bytes; v0 -= (t < literal_run) ? vmin : 0; if (literal_w < 8) StoreBitsBigEndian(dst, (uint32_t)v0, literal_w, literal_run, t); else if (t < literal_run) StoreBytesBigEndian(dst + t * (literal_w >> 3), v0, (literal_w >> 3)); dst += s->u.intrle.pl_bytes; } else { // Delta mode dst += literal_w; literal_w = 0; } dst += (literal_run * literal_w + 7) >> 3; numvals -= literal_run; inpos += literal_run; out_cnt += literal_run; __syncthreads(); } delta_run = s->u.intrle.delta_run; if (delta_run > 0) { if (t == literal_run) { int64_t delta = (int64_t)v1 - (int64_t)v0; uint64_t delta_base = zigzag(v0); if (delta == 0 && delta_run >= 3 && delta_run <= 10) { // Short repeat uint32_t delta_bw = 8 - min(CountLeadingBytes64(delta_base), 7); dst[0] = ((delta_bw - 1) << 3) + (delta_run - 3); for (uint32_t i = 0, b = delta_bw * 8; i < delta_bw; i++) { b -= 8; dst[1 + i] = static_cast<uint8_t>(delta_base >> b); } s->u.intrle.hdr_bytes = 1 + delta_bw; } else { // Delta uint64_t delta_u = zigzag(delta); uint32_t bytecnt = 2; dst[0] = 0xC0 + ((delta_run - 1) >> 8); dst[1] = (delta_run - 1) & 0xff; bytecnt += StoreVarint(dst + bytecnt, delta_base); bytecnt += StoreVarint(dst + bytecnt, delta_u); s->u.intrle.hdr_bytes = bytecnt; } } __syncthreads(); dst += s->u.intrle.hdr_bytes; numvals -= delta_run; inpos += delta_run; out_cnt += delta_run; } } if (!t) { s->strm_pos[cid] = static_cast<uint32_t>(dst - s->stream.data_ptrs[cid]); } __syncthreads(); return out_cnt; } /** * @brief Store a group of strings as a single concatenated string * * @param[in] dst destination buffer * @param[in] strenc string encoder state * @param[in] len(t) string length (per thread) * @param[in] t thread id */ static __device__ void StoreStringData(uint8_t* dst, strdata_enc_state_s* strenc, uint32_t len, int t) { // Start with summing up all the lengths uint32_t pos = len; 
uint32_t wt = t & 0x1f; for (uint32_t n = 1; n < 32; n <<= 1) { uint32_t tmp = shuffle(pos, (wt & ~n) | (n - 1)); pos += (wt & n) ? tmp : 0; } if (wt == 0x1f) { strenc->lengths_red[t >> 5] = pos; } dst += pos - len; __syncthreads(); if (t < 32) { uint32_t wlen = (wt < 16) ? strenc->lengths_red[wt] : 0; uint32_t wpos = wlen; for (uint32_t n = 1; n < 16; n <<= 1) { uint32_t tmp = shuffle(wpos, (wt & ~n) | (n - 1)); wpos += (wt & n) ? tmp : 0; } if (wt < 16) { strenc->lengths_red[wt] = wpos - wlen; } if (wt == 0xf) { strenc->char_count = wpos; // Update stream position } } __syncthreads(); // TBD: Might be more efficient to loop over 4 strings and copy 8 consecutive characters at a time // rather than have each thread do a memcpy if (len > 0) { memcpy(dst + strenc->lengths_red[t >> 5], strenc->str_data[t], len); } } /** * @brief In-place conversion from lengths to positions * * @param[in] vals input values * @param[in] numvals number of values * @param[in] t thread id */ template <class T> inline __device__ void lengths_to_positions(T* vals, uint32_t numvals, unsigned int t) { for (uint32_t n = 1; n < numvals; n <<= 1) { __syncthreads(); if ((t & n) && (t < numvals)) vals[t] += vals[(t & ~n) | (n - 1)]; } } template <int block_size, typename Storage> static __device__ void encode_null_mask(orcenc_state_s* s, bitmask_type const* pushdown_mask, Storage& scan_storage, int t) { if (s->stream.ids[CI_PRESENT] < 0) return; auto const column = *s->chunk.column; while (s->present_rows < s->chunk.null_mask_num_rows or s->numvals > 0) { // Number of rows read so far auto present_rows = s->present_rows; // valid_buf capacity is one byte per thread in block auto const buf_available_bits = encode_block_size * 8 - s->numvals; // Number of rows for the block to process in this iteration auto const nrows = min(s->chunk.null_mask_num_rows - present_rows, buf_available_bits); // Number of rows for this thread to process in this iteration auto const t_nrows = min(max(static_cast<int32_t>(nrows) - t * 8, 0), 8); auto const row = s->chunk.null_mask_start_row + present_rows + t * 8; auto get_mask_byte = [&](bitmask_type const* mask, size_type offset) -> uint8_t { if (t_nrows == 0) return 0; if (mask == nullptr) return 0xff; auto const begin_offset = row + offset; auto const end_offset = min(begin_offset + 8, offset + column.size()); auto const mask_word = cudf::detail::get_mask_offset_word(mask, 0, begin_offset, end_offset); return mask_word & 0xff; }; uint8_t pd_byte = (1 << t_nrows) - 1; uint32_t pd_set_cnt = t_nrows; uint32_t offset = t_nrows != 0 ?
t * 8 : nrows; if (pushdown_mask != nullptr) { pd_byte = get_mask_byte(pushdown_mask, 0) & ((1 << t_nrows) - 1); pd_set_cnt = __popc(pd_byte); // Scan the number of valid bits to get dst offset for each thread cub::BlockScan<uint32_t, block_size>(scan_storage).ExclusiveSum(pd_set_cnt, offset); } auto const mask_byte = get_mask_byte(column.null_mask(), column.offset()); auto dst_offset = offset + s->nnz; auto vbuf_bit_idx = [](int row) { // valid_buf is a circular buffer with validity of 8 rows in each element return row % (encode_block_size * 8); }; if (dst_offset % 8 == 0 and pd_set_cnt == 8) { s->valid_buf[vbuf_bit_idx(dst_offset) / 8] = mask_byte; } else { for (auto bit_idx = 0; bit_idx < t_nrows; ++bit_idx) { // skip bits where pushdown mask is not set if (not(pd_byte & (1 << bit_idx))) continue; if (mask_byte & (1 << bit_idx)) { set_bit(reinterpret_cast<uint32_t*>(s->valid_buf), vbuf_bit_idx(dst_offset++)); } else { clear_bit(reinterpret_cast<uint32_t*>(s->valid_buf), vbuf_bit_idx(dst_offset++)); } } } __syncthreads(); if (t == block_size - 1) { // Number of loaded rows, available for encode s->numvals += offset + pd_set_cnt; // Number of loaded rows (different from present_rows because of pushdown masks) s->nnz += offset + pd_set_cnt; } present_rows += nrows; if (!t) { s->present_rows = present_rows; } __syncthreads(); // RLE encode the present stream if (s->numvals > ((present_rows < s->chunk.null_mask_num_rows) ? 130 * 8 : 0)) { auto const flush = (present_rows < s->chunk.null_mask_num_rows) ? 0 : 7; auto const nbytes_out = (s->numvals + flush) / 8; auto const nrows_encoded = ByteRLE<CI_PRESENT, 0x1ff>(s, s->valid_buf, s->present_out / 8, nbytes_out, flush, t) * 8; if (!t) { // Number of rows encoded so far s->present_out += nrows_encoded; s->numvals -= min(s->numvals, nrows_encoded); } __syncthreads(); } } // reset shared state if (t == 0) { s->nnz = 0; } } /** * @brief Encode column data * * @param[in] chunks encoder chunks device array [column][rowgroup] * @param[in, out] streams chunk streams device array [column][rowgroup] */ // blockDim {`encode_block_size`,1,1} template <int block_size> __global__ void __launch_bounds__(block_size) gpuEncodeOrcColumnData(device_2dspan<EncChunk const> chunks, device_2dspan<encoder_chunk_streams> streams) { __shared__ __align__(16) orcenc_state_s state_g; __shared__ union { typename cub::BlockScan<uint32_t, block_size>::TempStorage scan_u32; typename cub::BlockReduce<int32_t, block_size>::TempStorage i32; typename cub::BlockReduce<int64_t, block_size>::TempStorage i64; typename cub::BlockReduce<uint32_t, block_size>::TempStorage u32; typename cub::BlockReduce<uint64_t, block_size>::TempStorage u64; } temp_storage; orcenc_state_s* const s = &state_g; uint32_t col_id = blockIdx.x; uint32_t group_id = blockIdx.y; int t = threadIdx.x; if (t == 0) { s->chunk = chunks[col_id][group_id]; s->stream = streams[col_id][group_id]; s->cur_row = 0; s->present_rows = 0; s->present_out = 0; s->numvals = 0; s->numlengths = 0; s->nnz = 0; s->strm_pos[CI_DATA] = 0; s->strm_pos[CI_PRESENT] = 0; s->strm_pos[CI_INDEX] = 0; // Dictionary data is encoded in a separate kernel s->strm_pos[CI_DATA2] = s->chunk.encoding_kind == DICTIONARY_V2 ? s->stream.lengths[CI_DATA2] : 0; s->strm_pos[CI_DICTIONARY] = s->chunk.encoding_kind == DICTIONARY_V2 ? 
s->stream.lengths[CI_DICTIONARY] : 0; } __syncthreads(); auto const pushdown_mask = [&]() -> cudf::bitmask_type const* { auto const parent_index = s->chunk.column->parent_index; if (!parent_index.has_value()) return nullptr; return chunks[parent_index.value()][0].column->pushdown_mask; }(); encode_null_mask<block_size>(s, pushdown_mask, temp_storage.scan_u32, t); __syncthreads(); auto const column = *s->chunk.column; while (s->cur_row < s->chunk.num_rows || s->numvals + s->numlengths != 0) { // Fetch non-null values auto const length_stream_only = s->chunk.type_kind == LIST or s->chunk.type_kind == MAP; if (not length_stream_only && s->stream.data_ptrs[CI_DATA] == nullptr) { // Pass-through __syncthreads(); if (!t) { s->cur_row = s->chunk.num_rows; s->strm_pos[CI_DATA] = s->chunk.num_rows * s->chunk.dtype_len; } } else if (s->cur_row < s->chunk.num_rows) { uint32_t maxnumvals = (s->chunk.type_kind == BOOLEAN) ? 2048 : 1024; uint32_t nrows = min(min(s->chunk.num_rows - s->cur_row, maxnumvals - max(s->numvals, s->numlengths)), encode_block_size); auto const row = s->chunk.start_row + s->cur_row + t; auto const is_value_valid = [&]() { if (t >= nrows) return false; return bit_value_or(pushdown_mask, column.offset() + row, true) and bit_value_or(column.null_mask(), column.offset() + row, true); }(); s->buf.u32[t] = is_value_valid ? 1u : 0u; // TODO: Could use a faster reduction relying on _popc() for the initial phase lengths_to_positions(s->buf.u32, encode_block_size, t); __syncthreads(); if (is_value_valid) { int nz_idx = (s->nnz + s->buf.u32[t] - 1) & (maxnumvals - 1); switch (s->chunk.type_kind) { case INT: case DATE: case FLOAT: s->vals.u32[nz_idx] = column.element<uint32_t>(row); break; case DOUBLE: case LONG: s->vals.u64[nz_idx] = column.element<uint64_t>(row); break; case SHORT: s->vals.u32[nz_idx] = column.element<uint16_t>(row); break; case BOOLEAN: case BYTE: s->vals.u8[nz_idx] = column.element<uint8_t>(row); break; case TIMESTAMP: { int64_t ts = column.element<int64_t>(row); int32_t ts_scale = powers_of_ten[9 - min(s->chunk.scale, 9)]; int64_t seconds = ts / ts_scale; int64_t nanos = (ts - seconds * ts_scale); s->vals.i64[nz_idx] = seconds - orc_utc_epoch; if (nanos != 0) { // Trailing zeroes are encoded in the lower 3-bits uint32_t zeroes = 0; nanos *= powers_of_ten[min(s->chunk.scale, 9)]; if (!(nanos % 100)) { nanos /= 100; zeroes = 1; while (zeroes < 7 && !(nanos % 10)) { nanos /= 10; zeroes++; } } nanos = (nanos << 3) + zeroes; } s->lengths.u64[nz_idx] = nanos; break; } case STRING: if (s->chunk.encoding_kind == DICTIONARY_V2) { uint32_t dict_idx = s->chunk.dict_index[row]; if (dict_idx > 0x7fff'ffffu) { dict_idx = s->chunk.dict_index[dict_idx & 0x7fff'ffffu]; } // translate dictionary index to sorted order, if enabled if (s->chunk.dict_data_order != nullptr) { dict_idx = s->chunk.dict_data_order[dict_idx]; } s->vals.u32[nz_idx] = dict_idx; } else { string_view value = column.element<string_view>(row); s->u.strenc.str_data[s->buf.u32[t] - 1] = value.data(); s->lengths.u32[nz_idx] = value.size_bytes(); } break; // Reusing the lengths array for the scale stream // Note: can be written in a faster manner, given that all values are equal case DECIMAL: s->lengths.u32[nz_idx] = zigzag(s->chunk.scale); break; case LIST: case MAP: { auto const& offsets = column.child(lists_column_view::offsets_column_index); // Compute list length from the offsets s->lengths.u32[nz_idx] = offsets.element<size_type>(row + 1 + column.offset()) - offsets.element<size_type>(row + column.offset()); } 
break; default: break; } } __syncthreads(); if (s->chunk.type_kind == STRING && s->chunk.encoding_kind != DICTIONARY_V2) { // Store string data uint32_t nz = s->buf.u32[511]; uint32_t nz_idx = (s->nnz + t) & 0x3ff; uint32_t len = (t < nz && s->u.strenc.str_data[t]) ? s->lengths.u32[nz_idx] : 0; StoreStringData(s->stream.data_ptrs[CI_DATA] + s->strm_pos[CI_DATA], &s->u.strenc, len, t); if (!t) { s->strm_pos[CI_DATA] += s->u.strenc.char_count; } __syncthreads(); } else if (s->chunk.type_kind == BOOLEAN) { // bool8 -> 8x bool1 uint32_t nz = s->buf.u32[511]; uint8_t n = ((s->nnz + nz) - (s->nnz & ~7) + 7) >> 3; if (t < n) { uint32_t idx8 = (s->nnz & ~7) + (t << 3); s->lengths.u8[((s->nnz >> 3) + t) & 0x1ff] = ((s->vals.u8[(idx8 + 0) & 0x7ff] & 1) << 7) | ((s->vals.u8[(idx8 + 1) & 0x7ff] & 1) << 6) | ((s->vals.u8[(idx8 + 2) & 0x7ff] & 1) << 5) | ((s->vals.u8[(idx8 + 3) & 0x7ff] & 1) << 4) | ((s->vals.u8[(idx8 + 4) & 0x7ff] & 1) << 3) | ((s->vals.u8[(idx8 + 5) & 0x7ff] & 1) << 2) | ((s->vals.u8[(idx8 + 6) & 0x7ff] & 1) << 1) | ((s->vals.u8[(idx8 + 7) & 0x7ff] & 1) << 0); } __syncthreads(); } if (!t) { uint32_t nz = s->buf.u32[511]; s->nnz += nz; s->numvals += nz; s->numlengths += (s->chunk.type_kind == TIMESTAMP || s->chunk.type_kind == DECIMAL || s->chunk.type_kind == LIST || s->chunk.type_kind == MAP || (s->chunk.type_kind == STRING && s->chunk.encoding_kind != DICTIONARY_V2)) ? nz : 0; s->cur_row += nrows; } __syncthreads(); // Encode values if (s->numvals > 0) { uint32_t flush = (s->cur_row == s->chunk.num_rows) ? 7 : 0, n; switch (s->chunk.type_kind) { case SHORT: case INT: case DATE: n = IntegerRLE<CI_DATA, int32_t, true, 0x3ff, block_size>( s, s->vals.i32, s->nnz - s->numvals, s->numvals, t, temp_storage.i32); break; case LONG: case TIMESTAMP: n = IntegerRLE<CI_DATA, int64_t, true, 0x3ff, block_size>( s, s->vals.i64, s->nnz - s->numvals, s->numvals, t, temp_storage.i64); break; case BYTE: n = ByteRLE<CI_DATA, 0x3ff>(s, s->vals.u8, s->nnz - s->numvals, s->numvals, flush, t); break; case BOOLEAN: n = ByteRLE<CI_DATA, 0x1ff>(s, s->lengths.u8, (s->nnz - s->numvals + flush) >> 3, (s->numvals + flush) >> 3, flush, t) * 8; break; case FLOAT: StoreBytes<CI_DATA, 0xfff>(s, s->vals.u8, (s->nnz - s->numvals) * 4, s->numvals * 4, t); n = s->numvals; break; case DOUBLE: StoreBytes<CI_DATA, 0x1fff>( s, s->vals.u8, (s->nnz - s->numvals) * 8, s->numvals * 8, t); n = s->numvals; break; case STRING: if (s->chunk.encoding_kind == DICTIONARY_V2) { n = IntegerRLE<CI_DATA, uint32_t, false, 0x3ff, block_size>( s, s->vals.u32, s->nnz - s->numvals, s->numvals, t, temp_storage.u32); } else { n = s->numvals; } break; case DECIMAL: { if (is_value_valid) { auto const id = column.type().id(); __uint128_t const zz_val = id == type_id::DECIMAL32 ? zigzag(column.element<int32_t>(row)) : id == type_id::DECIMAL64 ? zigzag(column.element<int64_t>(row)) : zigzag(column.element<__int128_t>(row)); auto const offset = (row == s->chunk.start_row) ? 
0 : s->chunk.decimal_offsets[row - 1]; StoreVarint(s->stream.data_ptrs[CI_DATA] + offset, zz_val); } n = s->numvals; } break; default: n = s->numvals; break; } __syncthreads(); if (!t) { s->numvals -= min(n, s->numvals); } } // Encode secondary stream values if (s->numlengths > 0) { uint32_t n; switch (s->chunk.type_kind) { case TIMESTAMP: n = IntegerRLE<CI_DATA2, uint64_t, false, 0x3ff, block_size>( s, s->lengths.u64, s->nnz - s->numlengths, s->numlengths, t, temp_storage.u64); break; case DECIMAL: case LIST: case MAP: case STRING: n = IntegerRLE<CI_DATA2, uint32_t, false, 0x3ff, block_size>( s, s->lengths.u32, s->nnz - s->numlengths, s->numlengths, t, temp_storage.u32); break; default: n = s->numlengths; break; } __syncthreads(); if (!t) { s->numlengths -= min(n, s->numlengths); } } } __syncthreads(); } __syncthreads(); if (t <= CI_PRESENT && s->stream.ids[t] >= 0) { // Update actual compressed length // (not needed for decimal data, whose exact size is known before encode) if (!(t == CI_DATA && s->chunk.type_kind == DECIMAL)) streams[col_id][group_id].lengths[t] = s->strm_pos[t]; if (!s->stream.data_ptrs[t]) { streams[col_id][group_id].data_ptrs[t] = static_cast<uint8_t*>(const_cast<void*>(column.head())) + (column.offset() + s->chunk.start_row) * s->chunk.dtype_len; } } } /** * @brief Encode column dictionaries * * @param[in] stripes Stripe dictionaries device array * @param[in] columns Pre-order flattened device array of ORC column views * @param[in] chunks EncChunk device array [rowgroup][column] * @param[in] num_columns Number of columns */ // blockDim {512,1,1} template <int block_size> __global__ void __launch_bounds__(block_size) gpuEncodeStringDictionaries(stripe_dictionary const* stripes, device_span<orc_column_device_view const> columns, device_2dspan<EncChunk const> chunks, device_2dspan<encoder_chunk_streams> streams) { __shared__ __align__(16) orcenc_state_s state_g; __shared__ typename cub::BlockReduce<uint32_t, block_size>::TempStorage temp_storage; orcenc_state_s* const s = &state_g; uint32_t stripe_id = blockIdx.x; uint32_t cid = (blockIdx.y) ? CI_DICTIONARY : CI_DATA2; int t = threadIdx.x; if (t == 0) s->u.dict_stripe = &stripes[stripe_id]; __syncthreads(); auto const strm_ptr = &streams[s->u.dict_stripe->column_idx][s->u.dict_stripe->start_rowgroup]; if (t == 0) { s->chunk = chunks[s->u.dict_stripe->column_idx][s->u.dict_stripe->start_rowgroup]; s->stream = *strm_ptr; s->strm_pos[cid] = 0; s->numlengths = 0; s->nrows = s->u.dict_stripe->entry_count; s->cur_row = 0; } auto const string_column = columns[s->u.dict_stripe->column_idx]; auto const dict_data = s->u.dict_stripe->data; __syncthreads(); if (s->chunk.encoding_kind != DICTIONARY_V2) { return; // This column isn't using dictionary encoding -> bail out } while (s->cur_row < s->nrows || s->numlengths != 0) { uint32_t numvals = min(s->nrows - s->cur_row, min(1024 - s->numlengths, 512)); uint32_t string_idx = (t < numvals) ? dict_data[s->cur_row + t] : 0; if (cid == CI_DICTIONARY) { // Encoding string contents char const* ptr = nullptr; uint32_t count = 0; if (t < numvals) { auto string_val = string_column.element<string_view>(string_idx); ptr = string_val.data(); count = string_val.size_bytes(); } s->u.strenc.str_data[t] = ptr; StoreStringData(s->stream.data_ptrs[CI_DICTIONARY] + s->strm_pos[CI_DICTIONARY], &s->u.strenc, (ptr) ? count : 0, t); if (!t) { s->strm_pos[CI_DICTIONARY] += s->u.strenc.char_count; } } else { // Encoding string lengths uint32_t count = (t < numvals) ? 
static_cast<uint32_t>(string_column.element<string_view>(string_idx).size_bytes()) : 0; uint32_t nz_idx = (s->cur_row + t) & 0x3ff; if (t < numvals) s->lengths.u32[nz_idx] = count; __syncthreads(); if (s->numlengths + numvals > 0) { uint32_t n = IntegerRLE<CI_DATA2, uint32_t, false, 0x3ff, block_size>( s, s->lengths.u32, s->cur_row, s->numlengths + numvals, t, temp_storage); __syncthreads(); if (!t) { s->numlengths += numvals; s->numlengths -= min(n, s->numlengths); } } } if (t == 0) { s->cur_row += numvals; } __syncthreads(); } if (t == 0) { strm_ptr->lengths[cid] = s->strm_pos[cid]; } } /** * @brief Merge chunked column data into a single contiguous stream * * @param[in,out] strm_desc StripeStream device array [stripe][stream] * @param[in,out] streams List of encoder chunk streams [column][rowgroup] */ // blockDim {compact_streams_block_size,1,1} __global__ void __launch_bounds__(compact_streams_block_size) gpuCompactOrcDataStreams(device_2dspan<StripeStream> strm_desc, device_2dspan<encoder_chunk_streams> streams) { __shared__ __align__(16) StripeStream ss; auto const stripe_id = blockIdx.x; auto const stream_id = blockIdx.y; auto const t = threadIdx.x; if (t == 0) { ss = strm_desc[stripe_id][stream_id]; } __syncthreads(); if (ss.data_ptr == nullptr) { return; } auto const cid = ss.stream_type; auto dst_ptr = ss.data_ptr; for (auto group = ss.first_chunk_id; group < ss.first_chunk_id + ss.num_chunks; ++group) { auto const len = streams[ss.column_id][group].lengths[cid]; if (len > 0) { auto const src_ptr = streams[ss.column_id][group].data_ptrs[cid]; for (uint32_t i = t; i < len; i += blockDim.x) { dst_ptr[i] = src_ptr[i]; } __syncthreads(); } if (t == 0) { streams[ss.column_id][group].data_ptrs[cid] = dst_ptr; } dst_ptr += len; } } /** * @brief Initializes compression input/output structures * * @param[in] strm_desc StripeStream device array [stripe][stream] * @param[in] chunks EncChunk device array [rowgroup][column] * @param[out] inputs Per-block compression input buffers * @param[out] outputs Per-block compression output buffers * @param[out] results Per-block compression status * @param[in] compressed_bfr Compression output buffer * @param[in] comp_blk_size Compression block size * @param[in] max_comp_blk_size Max size of any block after compression * @param[in] comp_block_align Required alignment for compressed blocks */ // blockDim {256,1,1} __global__ void __launch_bounds__(256) gpuInitCompressionBlocks(device_2dspan<StripeStream const> strm_desc, device_2dspan<encoder_chunk_streams> streams, // const? device_span<device_span<uint8_t const>> inputs, device_span<device_span<uint8_t>> outputs, device_span<compression_result> results, device_span<uint8_t> compressed_bfr, uint32_t comp_blk_size, uint32_t max_comp_blk_size, uint32_t comp_block_align) { __shared__ __align__(16) StripeStream ss; __shared__ uint8_t* uncomp_base_g; auto const padded_block_header_size = util::round_up_unsafe(block_header_size, comp_block_align); auto const padded_comp_block_size = util::round_up_unsafe(max_comp_blk_size, comp_block_align); auto const stripe_id = blockIdx.x; auto const stream_id = blockIdx.y; uint32_t t = threadIdx.x; uint32_t num_blocks; uint8_t *src, *dst; if (t == 0) { ss = strm_desc[stripe_id][stream_id]; uncomp_base_g = streams[ss.column_id][ss.first_chunk_id].data_ptrs[ss.stream_type]; } __syncthreads(); src = uncomp_base_g; dst = compressed_bfr.data() + ss.bfr_offset; num_blocks = (ss.stream_size > 0) ? 
(ss.stream_size - 1) / comp_blk_size + 1 : 1; for (uint32_t b = t; b < num_blocks; b += 256) { uint32_t blk_size = min(comp_blk_size, ss.stream_size - min(b * comp_blk_size, ss.stream_size)); inputs[ss.first_block + b] = {src + b * comp_blk_size, blk_size}; auto const dst_offset = padded_block_header_size + b * (padded_block_header_size + padded_comp_block_size); outputs[ss.first_block + b] = {dst + dst_offset, max_comp_blk_size}; results[ss.first_block + b] = {0, compression_status::FAILURE}; } } /** * @brief Compacts compressed blocks into a single contiguous stream, and updates the 3-byte block * length fields * * @param[in,out] strm_desc StripeStream device array [stripe][stream] * @param[in] chunks EncChunk device array [rowgroup][column] * @param[in] inputs Per-block compression input buffers * @param[out] outputs Per-block compression output buffers * @param[out] results Per-block compression status * @param[in] compressed_bfr Compression output buffer * @param[in] comp_blk_size Compression block size * @param[in] max_comp_blk_size Max size of any block after compression */ // blockDim {1024,1,1} __global__ void __launch_bounds__(1024) gpuCompactCompressedBlocks(device_2dspan<StripeStream> strm_desc, device_span<device_span<uint8_t const> const> inputs, device_span<device_span<uint8_t> const> outputs, device_span<compression_result> results, device_span<uint8_t> compressed_bfr, uint32_t comp_blk_size, uint32_t max_comp_blk_size) { __shared__ __align__(16) StripeStream ss; __shared__ uint8_t const* comp_src_g; __shared__ uint32_t comp_len_g; auto const stripe_id = blockIdx.x; auto const stream_id = blockIdx.y; uint32_t t = threadIdx.x; uint32_t num_blocks, b, blk_size; uint8_t const* src; uint8_t* dst; if (t == 0) ss = strm_desc[stripe_id][stream_id]; __syncthreads(); num_blocks = (ss.stream_size > 0) ? (ss.stream_size - 1) / comp_blk_size + 1 : 0; dst = compressed_bfr.data() + ss.bfr_offset; b = 0; do { if (t == 0) { auto const src_len = min(comp_blk_size, ss.stream_size - min(b * comp_blk_size, ss.stream_size)); auto dst_len = (results[ss.first_block + b].status == compression_status::SUCCESS) ? results[ss.first_block + b].bytes_written : src_len; uint32_t blk_size24{}; // Only use the compressed block if it's smaller than the uncompressed one // If compression failed, dst_len == src_len, so the uncompressed block will be used if (src_len < dst_len) { // Copy from uncompressed source src = inputs[ss.first_block + b].data(); results[ss.first_block + b].bytes_written = src_len; dst_len = src_len; blk_size24 = dst_len * 2 + 1; } else { // Compressed block src = outputs[ss.first_block + b].data(); blk_size24 = dst_len * 2 + 0; } dst[0] = static_cast<uint8_t>(blk_size24 >> 0); dst[1] = static_cast<uint8_t>(blk_size24 >> 8); dst[2] = static_cast<uint8_t>(blk_size24 >> 16); comp_src_g = src; comp_len_g = dst_len; } __syncthreads(); src = comp_src_g; blk_size = comp_len_g; dst += 3; // skip over length written by thread0 if (src != dst) { for (uint32_t i = 0; i < blk_size; i += 1024) { uint8_t v = (i + t < blk_size) ?
src[i + t] : 0; __syncthreads(); if (i + t < blk_size) { dst[i + t] = v; } } } dst += blk_size; __syncthreads(); } while (++b < num_blocks); // Update stripe stream with the compressed size if (t == 0) { strm_desc[stripe_id][stream_id].stream_size = static_cast<uint32_t>(dst - (compressed_bfr.data() + ss.bfr_offset)); } } // Holds a non-owning view of a decimal column's element sizes struct decimal_column_element_sizes { uint32_t col_idx; device_span<uint32_t> sizes; }; // Converts sizes of individual decimal elements to offsets within each row group // Conversion is done in-place template <int block_size> __global__ void decimal_sizes_to_offsets_kernel(device_2dspan<rowgroup_rows const> rg_bounds, device_span<decimal_column_element_sizes> sizes) { using block_scan = cub::BlockScan<uint32_t, block_size>; __shared__ typename block_scan::TempStorage scan_storage; int const t = threadIdx.x; auto const& col_elem_sizes = sizes[blockIdx.x]; auto const& row_group = rg_bounds[blockIdx.y][col_elem_sizes.col_idx]; auto const elem_sizes = col_elem_sizes.sizes.data() + row_group.begin; uint32_t initial_value = 0; // Do a series of block sums, storing results in the array as we go for (int64_t pos = 0; pos < row_group.size(); pos += block_size) { auto const tidx = pos + t; auto tval = tidx < row_group.size() ? elem_sizes[tidx] : 0u; uint32_t block_sum = 0; block_scan(scan_storage).InclusiveSum(tval, tval, block_sum); if (tidx < row_group.size()) { elem_sizes[tidx] = tval + initial_value; } initial_value += block_sum; } } void EncodeOrcColumnData(device_2dspan<EncChunk const> chunks, device_2dspan<encoder_chunk_streams> streams, rmm::cuda_stream_view stream) { dim3 dim_block(encode_block_size, 1); // `encode_block_size` threads per chunk dim3 dim_grid(chunks.size().first, chunks.size().second); gpuEncodeOrcColumnData<encode_block_size> <<<dim_grid, dim_block, 0, stream.value()>>>(chunks, streams); } void EncodeStripeDictionaries(stripe_dictionary const* stripes, device_span<orc_column_device_view const> columns, device_2dspan<EncChunk const> chunks, uint32_t num_string_columns, uint32_t num_stripes, device_2dspan<encoder_chunk_streams> enc_streams, rmm::cuda_stream_view stream) { dim3 dim_block(512, 1); // 512 threads per dictionary dim3 dim_grid(num_string_columns * num_stripes, 2); gpuEncodeStringDictionaries<512> <<<dim_grid, dim_block, 0, stream.value()>>>(stripes, columns, chunks, enc_streams); } void CompactOrcDataStreams(device_2dspan<StripeStream> strm_desc, device_2dspan<encoder_chunk_streams> enc_streams, rmm::cuda_stream_view stream) { dim3 dim_block(compact_streams_block_size, 1); dim3 dim_grid(strm_desc.size().first, strm_desc.size().second); gpuCompactOrcDataStreams<<<dim_grid, dim_block, 0, stream.value()>>>(strm_desc, enc_streams); } std::optional<writer_compression_statistics> CompressOrcDataStreams( device_span<uint8_t> compressed_data, uint32_t num_compressed_blocks, CompressionKind compression, uint32_t comp_blk_size, uint32_t max_comp_blk_size, uint32_t comp_block_align, bool collect_statistics, device_2dspan<StripeStream> strm_desc, device_2dspan<encoder_chunk_streams> enc_streams, device_span<compression_result> comp_res, rmm::cuda_stream_view stream) { rmm::device_uvector<device_span<uint8_t const>> comp_in(num_compressed_blocks, stream); rmm::device_uvector<device_span<uint8_t>> comp_out(num_compressed_blocks, stream); dim3 dim_block_init(256, 1); dim3 dim_grid(strm_desc.size().first, strm_desc.size().second); gpuInitCompressionBlocks<<<dim_grid, dim_block_init, 0, 
stream.value()>>>(strm_desc, enc_streams, comp_in, comp_out, comp_res, compressed_data, comp_blk_size, max_comp_blk_size, comp_block_align); if (compression == SNAPPY) { try { if (nvcomp::is_compression_disabled(nvcomp::compression_type::SNAPPY)) { gpu_snap(comp_in, comp_out, comp_res, stream); } else { nvcomp::batched_compress( nvcomp::compression_type::SNAPPY, comp_in, comp_out, comp_res, stream); } } catch (...) { // There was an error in compressing so set an error status for each block thrust::for_each( rmm::exec_policy(stream), comp_res.begin(), comp_res.end(), [] __device__(compression_result & stat) { stat.status = compression_status::FAILURE; }); // Since SNAPPY is the default compression (may not be explicitly requested), fall back to // writing without compression CUDF_LOG_WARN("ORC writer: compression failed, writing uncompressed data"); } } else if (compression == ZLIB) { if (auto const reason = nvcomp::is_compression_disabled(nvcomp::compression_type::DEFLATE); reason) { CUDF_FAIL("Compression error: " + reason.value()); } nvcomp::batched_compress( nvcomp::compression_type::DEFLATE, comp_in, comp_out, comp_res, stream); } else if (compression == ZSTD) { if (auto const reason = nvcomp::is_compression_disabled(nvcomp::compression_type::ZSTD); reason) { CUDF_FAIL("Compression error: " + reason.value()); } nvcomp::batched_compress(nvcomp::compression_type::ZSTD, comp_in, comp_out, comp_res, stream); } else if (compression != NONE) { CUDF_FAIL("Unsupported compression type"); } dim3 dim_block_compact(1024, 1); gpuCompactCompressedBlocks<<<dim_grid, dim_block_compact, 0, stream.value()>>>( strm_desc, comp_in, comp_out, comp_res, compressed_data, comp_blk_size, max_comp_blk_size); if (collect_statistics) { return cudf::io::collect_compression_statistics(comp_in, comp_res, stream); } else { return std::nullopt; } } void decimal_sizes_to_offsets(device_2dspan<rowgroup_rows const> rg_bounds, std::map<uint32_t, rmm::device_uvector<uint32_t>>& elem_sizes, rmm::cuda_stream_view stream) { if (rg_bounds.count() == 0) return; // Convert map to a vector of views of the `elem_sizes` device buffers std::vector<decimal_column_element_sizes> h_sizes; h_sizes.reserve(elem_sizes.size()); std::transform(elem_sizes.begin(), elem_sizes.end(), std::back_inserter(h_sizes), [](auto& p) { return decimal_column_element_sizes{p.first, p.second}; }); // Copy the vector of views to the device so that we can pass it to the kernel auto d_sizes = cudf::detail::make_device_uvector_async<decimal_column_element_sizes>( h_sizes, stream, rmm::mr::get_current_device_resource()); constexpr int block_size = 256; dim3 const grid_size{static_cast<unsigned int>(elem_sizes.size()), // num decimal columns static_cast<unsigned int>(rg_bounds.size().first)}; // num rowgroups decimal_sizes_to_offsets_kernel<block_size> <<<grid_size, block_size, 0, stream.value()>>>(rg_bounds, d_sizes); } } // namespace gpu } // namespace orc } // namespace io } // namespace cudf
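The 3-byte block header written by gpuCompactCompressedBlocks above packs the block length and an "original" flag into 24 little-endian bits: blk_size24 = dst_len * 2 + 1 marks a block stored uncompressed (compression did not shrink it), while dst_len * 2 + 0 marks a compressed block. Below is a minimal host-side sketch of the same encoding; the helper names are illustrative and not part of the writer.

#include <array>
#include <cstdint>
#include <cstdio>
#include <utility>

// Encode an ORC block header: (length << 1) | is_original, stored as three
// little-endian bytes. is_original == 1 means the block is stored
// uncompressed because compression did not make it smaller, matching
// blk_size24 = dst_len * 2 + 1 in gpuCompactCompressedBlocks.
std::array<uint8_t, 3> encode_block_header(uint32_t length, bool is_original)
{
  uint32_t const v = (length << 1) | (is_original ? 1u : 0u);
  return {static_cast<uint8_t>(v >> 0),
          static_cast<uint8_t>(v >> 8),
          static_cast<uint8_t>(v >> 16)};
}

// Decode the three header bytes back into {length, is_original}.
std::pair<uint32_t, bool> decode_block_header(uint8_t const* hdr)
{
  uint32_t const v = hdr[0] | (hdr[1] << 8) | (hdr[2] << 16);
  return {v >> 1, (v & 1) != 0};
}

int main()
{
  auto const hdr         = encode_block_header(262'144, false);  // 256 KiB compressed block
  auto const [len, orig] = decode_block_header(hdr.data());
  std::printf("length=%u original=%d\n", len, orig);  // length=262144 original=0
}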
rapidsai_public_repos/cudf/cpp/src/io/orc/reader_impl.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "aggregate_orc_metadata.hpp" #include "orc.hpp" #include "orc_gpu.hpp" #include <io/utilities/column_buffer.hpp> #include <io/utilities/hostdevice_vector.hpp> #include <cudf/io/datasource.hpp> #include <cudf/io/detail/orc.hpp> #include <cudf/io/orc.hpp> #include <rmm/cuda_stream_view.hpp> #include <memory> #include <string> #include <utility> #include <vector> namespace cudf::io::detail::orc { using namespace cudf::io::orc; namespace { struct reader_column_meta; } /** * @brief Implementation for ORC reader. */ class reader::impl { public: /** * @brief Constructor from a dataset source with reader options. * * @param sources Dataset sources * @param options Settings for controlling reading behavior * @param stream CUDA stream used for device memory operations and kernel launches * @param mr Device memory resource to use for device memory allocation */ explicit impl(std::vector<std::unique_ptr<datasource>>&& sources, orc_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr); /** * @brief Read an entire set or a subset of data and returns a set of columns * * @param skip_rows Number of rows to skip from the start * @param num_rows_opt Optional number of rows to read * @param stripes Indices of individual stripes to load if non-empty * @return The set of columns along with metadata */ table_with_metadata read(uint64_t skip_rows, std::optional<size_type> const& num_rows_opt, std::vector<std::vector<size_type>> const& stripes); private: rmm::cuda_stream_view const _stream; rmm::mr::device_memory_resource* const _mr; std::vector<std::unique_ptr<datasource>> const _sources; // Unused but owns data for `_metadata` cudf::io::orc::detail::aggregate_orc_metadata _metadata; cudf::io::orc::detail::column_hierarchy const _selected_columns; // Need to be after _metadata data_type const _timestamp_type; // Override output timestamp resolution bool const _use_index; // Enable or disable attempt to use row index for parsing bool const _use_np_dtypes; // Enable or disable the conversion to numpy-compatible dtypes std::vector<std::string> const _decimal128_columns; // Control decimals conversion std::unique_ptr<reader_column_meta> const _col_meta; // Track of orc mapping and child details }; } // namespace cudf::io::detail::orc
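reader_impl.hpp only declares the implementation class; applications drive it through the public API declared in <cudf/io/orc.hpp>. A minimal usage sketch follows, assuming a placeholder file name "example.orc" and placeholder column names; it shows how orc_reader_options populates the read() parameters declared above.

#include <cudf/io/orc.hpp>

#include <iostream>

int main()
{
  // Build reader options against a placeholder file; column selection and
  // row bounds map onto the reader::impl::read() parameters declared above.
  auto const options =
    cudf::io::orc_reader_options::builder(cudf::io::source_info{"example.orc"})
      .columns({"a", "b"})  // subset of columns to materialize (names assumed)
      .use_index(true)      // allow the row-group index path in the reader
      .build();

  // read_orc() constructs the reader and forwards to impl::read(), returning
  // the decoded table together with schema and user metadata.
  cudf::io::table_with_metadata result = cudf::io::read_orc(options);
  std::cout << "rows: " << result.tbl->num_rows() << "\n";
}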
rapidsai_public_repos/cudf/cpp/src/io/orc/reader_impl.cu
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file reader_impl.cu * @brief cuDF-IO ORC reader class implementation */ #include "orc.hpp" #include "orc_gpu.hpp" #include "reader_impl.hpp" #include <io/comp/gpuinflate.hpp> #include <io/comp/nvcomp_adapter.hpp> #include <io/utilities/config_utils.hpp> #include <cudf/detail/timezone.hpp> #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/detail/utilities/vector_factories.hpp> #include <cudf/table/table.hpp> #include <cudf/utilities/bit.hpp> #include <cudf/utilities/error.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_buffer.hpp> #include <rmm/device_scalar.hpp> #include <rmm/device_uvector.hpp> #include <rmm/exec_policy.hpp> #include <thrust/copy.h> #include <thrust/fill.h> #include <thrust/for_each.h> #include <thrust/iterator/counting_iterator.h> #include <thrust/pair.h> #include <thrust/scan.h> #include <thrust/transform.h> #include <algorithm> #include <iterator> namespace cudf::io::detail::orc { using namespace cudf::io::orc; namespace { /** * @brief Keeps track of orc mapping and child column details. */ struct reader_column_meta { // Mapping between column id in orc to processing order. std::vector<std::vector<size_type>> orc_col_map; // Number of rows in child columns. std::vector<uint32_t> num_child_rows; // Consists of parent column valid_map and null count. std::vector<column_validity_info> parent_column_data; std::vector<size_type> parent_column_index; // Start row of child columns [stripe][column]. std::vector<uint32_t> child_start_row; // Number of rows of child columns [stripe][column]. std::vector<uint32_t> num_child_rows_per_stripe; struct row_group_meta { uint32_t num_rows; // number of rows in a column in a row group uint32_t start_row; // start row in a column in a row group }; // Row group metadata [rowgroup][column]. 
std::vector<row_group_meta> rwgrp_meta; }; /** * @brief Struct that maps ORC streams to columns */ struct orc_stream_info { explicit orc_stream_info(uint64_t offset_, std::size_t dst_pos_, uint32_t length_, uint32_t stripe_idx_) : offset(offset_), dst_pos(dst_pos_), length(length_), stripe_idx(stripe_idx_) { } uint64_t offset; // offset in file std::size_t dst_pos; // offset in memory relative to start of compressed stripe data std::size_t length; // length in file uint32_t stripe_idx; // stripe index }; /** * @brief Function that populates column descriptors stream/chunk */ std::size_t gather_stream_info(std::size_t stripe_index, orc::StripeInformation const* stripeinfo, orc::StripeFooter const* stripefooter, host_span<int const> orc2gdf, host_span<orc::SchemaType const> types, bool use_index, bool apply_struct_map, std::size_t* num_dictionary_entries, std::vector<orc_stream_info>& stream_info, cudf::detail::hostdevice_2dvector<gpu::ColumnDesc>& chunks) { uint64_t src_offset = 0; uint64_t dst_offset = 0; auto const get_stream_index_type = [](orc::StreamKind kind) { switch (kind) { case orc::DATA: return gpu::CI_DATA; case orc::LENGTH: case orc::SECONDARY: return gpu::CI_DATA2; case orc::DICTIONARY_DATA: return gpu::CI_DICTIONARY; case orc::PRESENT: return gpu::CI_PRESENT; case orc::ROW_INDEX: return gpu::CI_INDEX; default: // Skip this stream as it's not strictly required return gpu::CI_NUM_STREAMS; } }; for (auto const& stream : stripefooter->streams) { if (!stream.column_id || *stream.column_id >= orc2gdf.size()) { dst_offset += stream.length; continue; } auto const column_id = *stream.column_id; auto col = orc2gdf[column_id]; if (col == -1 and apply_struct_map) { // A struct-type column has no data itself, but rather child columns // for each of its fields. There is only a PRESENT stream, which // needs to be included for the reader. auto const schema_type = types[column_id]; if (not schema_type.subtypes.empty()) { if (schema_type.kind == orc::STRUCT && stream.kind == orc::PRESENT) { for (auto const& idx : schema_type.subtypes) { auto child_idx = (idx < orc2gdf.size()) ? orc2gdf[idx] : -1; if (child_idx >= 0) { col = child_idx; auto& chunk = chunks[stripe_index][col]; chunk.strm_id[gpu::CI_PRESENT] = stream_info.size(); chunk.strm_len[gpu::CI_PRESENT] = stream.length; } } } } } if (col != -1) { if (src_offset >= stripeinfo->indexLength || use_index) { auto& chunk = chunks[stripe_index][col]; auto const index_type = get_stream_index_type(stream.kind); if (index_type < gpu::CI_NUM_STREAMS) { chunk.strm_id[index_type] = stream_info.size(); chunk.strm_len[index_type] = stream.length; // NOTE: skip_count field is temporarily used to track the presence of index streams chunk.skip_count |= 1 << index_type; if (index_type == gpu::CI_DICTIONARY) { chunk.dictionary_start = *num_dictionary_entries; chunk.dict_len = stripefooter->columns[column_id].dictionarySize; *num_dictionary_entries += stripefooter->columns[column_id].dictionarySize; } } } stream_info.emplace_back( stripeinfo->offset + src_offset, dst_offset, stream.length, stripe_index); dst_offset += stream.length; } src_offset += stream.length; } return dst_offset; } /** * @brief Decompresses the stripe data, at stream granularity. 
* * @param decompressor Block decompressor * @param stripe_data List of source stripe column data * @param stream_info List of stream to column mappings * @param chunks Vector of list of column chunk descriptors * @param row_groups Vector of list of row index descriptors * @param num_stripes Number of stripes making up column chunks * @param row_index_stride Distance between each row index * @param use_base_stride Whether to use base stride obtained from meta or use the computed value * @param stream CUDA stream used for device memory operations and kernel launches * @return Device buffer to decompressed page data */ rmm::device_buffer decompress_stripe_data( OrcDecompressor const& decompressor, host_span<rmm::device_buffer const> stripe_data, host_span<orc_stream_info> stream_info, cudf::detail::hostdevice_2dvector<gpu::ColumnDesc>& chunks, cudf::detail::hostdevice_2dvector<gpu::RowGroup>& row_groups, std::size_t num_stripes, std::size_t row_index_stride, bool use_base_stride, rmm::cuda_stream_view stream) { // Parse the columns' compressed info cudf::detail::hostdevice_vector<gpu::CompressedStreamInfo> compinfo( 0, stream_info.size(), stream); for (auto const& info : stream_info) { compinfo.push_back(gpu::CompressedStreamInfo( static_cast<uint8_t const*>(stripe_data[info.stripe_idx].data()) + info.dst_pos, info.length)); } compinfo.host_to_device_async(stream); gpu::ParseCompressedStripeData(compinfo.device_ptr(), compinfo.size(), decompressor.GetBlockSize(), decompressor.GetLog2MaxCompressionRatio(), stream); compinfo.device_to_host_sync(stream); // Count the exact number of compressed blocks std::size_t num_compressed_blocks = 0; std::size_t num_uncompressed_blocks = 0; std::size_t total_decomp_size = 0; for (std::size_t i = 0; i < compinfo.size(); ++i) { num_compressed_blocks += compinfo[i].num_compressed_blocks; num_uncompressed_blocks += compinfo[i].num_uncompressed_blocks; total_decomp_size += compinfo[i].max_uncompressed_size; } CUDF_EXPECTS( not((num_uncompressed_blocks + num_compressed_blocks > 0) and (total_decomp_size == 0)), "Inconsistent info on compression blocks"); // Buffer needs to be padded. // Required by `gpuDecodeOrcColumnData`. 
rmm::device_buffer decomp_data( cudf::util::round_up_safe(total_decomp_size, BUFFER_PADDING_MULTIPLE), stream); if (decomp_data.is_empty()) { return decomp_data; } rmm::device_uvector<device_span<uint8_t const>> inflate_in( num_compressed_blocks + num_uncompressed_blocks, stream); rmm::device_uvector<device_span<uint8_t>> inflate_out( num_compressed_blocks + num_uncompressed_blocks, stream); rmm::device_uvector<compression_result> inflate_res(num_compressed_blocks, stream); thrust::fill(rmm::exec_policy(stream), inflate_res.begin(), inflate_res.end(), compression_result{0, compression_status::FAILURE}); // Parse again to populate the decompression input/output buffers std::size_t decomp_offset = 0; uint32_t max_uncomp_block_size = 0; uint32_t start_pos = 0; auto start_pos_uncomp = (uint32_t)num_compressed_blocks; for (std::size_t i = 0; i < compinfo.size(); ++i) { auto dst_base = static_cast<uint8_t*>(decomp_data.data()); compinfo[i].uncompressed_data = dst_base + decomp_offset; compinfo[i].dec_in_ctl = inflate_in.data() + start_pos; compinfo[i].dec_out_ctl = inflate_out.data() + start_pos; compinfo[i].dec_res = {inflate_res.data() + start_pos, compinfo[i].num_compressed_blocks}; compinfo[i].copy_in_ctl = inflate_in.data() + start_pos_uncomp; compinfo[i].copy_out_ctl = inflate_out.data() + start_pos_uncomp; stream_info[i].dst_pos = decomp_offset; decomp_offset += compinfo[i].max_uncompressed_size; start_pos += compinfo[i].num_compressed_blocks; start_pos_uncomp += compinfo[i].num_uncompressed_blocks; max_uncomp_block_size = std::max(max_uncomp_block_size, compinfo[i].max_uncompressed_block_size); } compinfo.host_to_device_async(stream); gpu::ParseCompressedStripeData(compinfo.device_ptr(), compinfo.size(), decompressor.GetBlockSize(), decompressor.GetLog2MaxCompressionRatio(), stream); // Value for checking whether we decompress successfully. // It doesn't need to be atomic as there is no race condition: we only write `true` if needed. 
cudf::detail::hostdevice_vector<bool> any_block_failure(1, stream); any_block_failure[0] = false; any_block_failure.host_to_device_async(stream); // Dispatch batches of blocks to decompress if (num_compressed_blocks > 0) { device_span<device_span<uint8_t const>> inflate_in_view{inflate_in.data(), num_compressed_blocks}; device_span<device_span<uint8_t>> inflate_out_view{inflate_out.data(), num_compressed_blocks}; switch (decompressor.compression()) { case compression_type::ZLIB: if (nvcomp::is_decompression_disabled(nvcomp::compression_type::DEFLATE)) { gpuinflate( inflate_in_view, inflate_out_view, inflate_res, gzip_header_included::NO, stream); } else { nvcomp::batched_decompress(nvcomp::compression_type::DEFLATE, inflate_in_view, inflate_out_view, inflate_res, max_uncomp_block_size, total_decomp_size, stream); } break; case compression_type::SNAPPY: if (nvcomp::is_decompression_disabled(nvcomp::compression_type::SNAPPY)) { gpu_unsnap(inflate_in_view, inflate_out_view, inflate_res, stream); } else { nvcomp::batched_decompress(nvcomp::compression_type::SNAPPY, inflate_in_view, inflate_out_view, inflate_res, max_uncomp_block_size, total_decomp_size, stream); } break; case compression_type::ZSTD: if (auto const reason = nvcomp::is_decompression_disabled(nvcomp::compression_type::ZSTD); reason) { CUDF_FAIL("Decompression error: " + reason.value()); } nvcomp::batched_decompress(nvcomp::compression_type::ZSTD, inflate_in_view, inflate_out_view, inflate_res, max_uncomp_block_size, total_decomp_size, stream); break; default: CUDF_FAIL("Unexpected decompression dispatch"); break; } // Check if any block has been failed to decompress. // Not using `thrust::any` or `thrust::count_if` to defer stream sync. thrust::for_each( rmm::exec_policy(stream), thrust::make_counting_iterator(std::size_t{0}), thrust::make_counting_iterator(inflate_res.size()), [results = inflate_res.begin(), any_block_failure = any_block_failure.device_ptr()] __device__(auto const idx) { if (results[idx].status != compression_status::SUCCESS) { *any_block_failure = true; } }); } if (num_uncompressed_blocks > 0) { device_span<device_span<uint8_t const>> copy_in_view{inflate_in.data() + num_compressed_blocks, num_uncompressed_blocks}; device_span<device_span<uint8_t>> copy_out_view{inflate_out.data() + num_compressed_blocks, num_uncompressed_blocks}; gpu_copy_uncompressed_blocks(copy_in_view, copy_out_view, stream); } // Copy without stream sync, thus need to wait for stream sync below to access. any_block_failure.device_to_host_async(stream); gpu::PostDecompressionReassemble(compinfo.device_ptr(), compinfo.size(), stream); compinfo.device_to_host_sync(stream); // This also sync stream for `any_block_failure`. // We can check on host after stream synchronize CUDF_EXPECTS(not any_block_failure[0], "Error during decompression"); auto const num_columns = chunks.size().second; // Update the stream information with the updated uncompressed info // TBD: We could update the value from the information we already // have in stream_info[], but using the gpu results also updates // max_uncompressed_size to the actual uncompressed size, or zero if // decompression failed. 
for (std::size_t i = 0; i < num_stripes; ++i) { for (std::size_t j = 0; j < num_columns; ++j) { auto& chunk = chunks[i][j]; for (int k = 0; k < gpu::CI_NUM_STREAMS; ++k) { if (chunk.strm_len[k] > 0 && chunk.strm_id[k] < compinfo.size()) { chunk.streams[k] = compinfo[chunk.strm_id[k]].uncompressed_data; chunk.strm_len[k] = compinfo[chunk.strm_id[k]].max_uncompressed_size; } } } } if (row_groups.size().first) { chunks.host_to_device_async(stream); row_groups.host_to_device_async(stream); gpu::ParseRowGroupIndex(row_groups.base_device_ptr(), compinfo.device_ptr(), chunks.base_device_ptr(), num_columns, num_stripes, row_groups.size().first, row_index_stride, use_base_stride, stream); } return decomp_data; } /** * @brief Updates null mask of columns whose parent is a struct column. * * If struct column has null element, that row would be skipped while writing child column in ORC, * so we need to insert the missing null elements in child column. There is another behavior from * pyspark, where if the child column doesn't have any null elements, it will not have present * stream, so in that case parent null mask need to be copied to child column. * * @param chunks Vector of list of column chunk descriptors * @param out_buffers Output columns' device buffers * @param stream CUDA stream used for device memory operations and kernel launches. * @param mr Device memory resource to use for device memory allocation */ void update_null_mask(cudf::detail::hostdevice_2dvector<gpu::ColumnDesc>& chunks, host_span<column_buffer> out_buffers, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto const num_stripes = chunks.size().first; auto const num_columns = chunks.size().second; bool is_mask_updated = false; for (std::size_t col_idx = 0; col_idx < num_columns; ++col_idx) { if (chunks[0][col_idx].parent_validity_info.valid_map_base != nullptr) { if (not is_mask_updated) { chunks.device_to_host_sync(stream); is_mask_updated = true; } auto parent_valid_map_base = chunks[0][col_idx].parent_validity_info.valid_map_base; auto child_valid_map_base = out_buffers[col_idx].null_mask(); auto child_mask_len = chunks[0][col_idx].column_num_rows - chunks[0][col_idx].parent_validity_info.null_count; auto parent_mask_len = chunks[0][col_idx].column_num_rows; if (child_valid_map_base != nullptr) { rmm::device_uvector<uint32_t> dst_idx(child_mask_len, stream); // Copy indexes at which the parent has valid value. 
thrust::copy_if(rmm::exec_policy(stream), thrust::make_counting_iterator(0), thrust::make_counting_iterator(0) + parent_mask_len, dst_idx.begin(), [parent_valid_map_base] __device__(auto idx) { return bit_is_set(parent_valid_map_base, idx); }); auto merged_null_mask = cudf::detail::create_null_mask( parent_mask_len, mask_state::ALL_NULL, rmm::cuda_stream_view(stream), mr); auto merged_mask = static_cast<bitmask_type*>(merged_null_mask.data()); uint32_t* dst_idx_ptr = dst_idx.data(); // Copy child valid bits from child column to valid indexes, this will merge both child // and parent null masks thrust::for_each(rmm::exec_policy(stream), thrust::make_counting_iterator(0), thrust::make_counting_iterator(0) + dst_idx.size(), [child_valid_map_base, dst_idx_ptr, merged_mask] __device__(auto idx) { if (bit_is_set(child_valid_map_base, idx)) { cudf::set_bit(merged_mask, dst_idx_ptr[idx]); }; }); out_buffers[col_idx].set_null_mask(std::move(merged_null_mask)); } else { // Since child column doesn't have a mask, copy parent null mask auto mask_size = bitmask_allocation_size_bytes(parent_mask_len); out_buffers[col_idx].set_null_mask( rmm::device_buffer(static_cast<void*>(parent_valid_map_base), mask_size, stream, mr)); } } } if (is_mask_updated) { // Update chunks with pointers to column data which might have been changed. for (std::size_t stripe_idx = 0; stripe_idx < num_stripes; ++stripe_idx) { for (std::size_t col_idx = 0; col_idx < num_columns; ++col_idx) { auto& chunk = chunks[stripe_idx][col_idx]; chunk.valid_map_base = out_buffers[col_idx].null_mask(); } } chunks.host_to_device_sync(stream); } } /** * @brief Converts the stripe column data and outputs to columns. * * @param num_dicts Number of dictionary entries required * @param skip_rows Number of rows to offset from start * @param row_index_stride Distance between each row index * @param level Current nesting level being processed * @param tz_table Local time to UTC conversion table * @param chunks Vector of list of column chunk descriptors * @param row_groups Vector of list of row index descriptors * @param out_buffers Output columns' device buffers * @param stream CUDA stream used for device memory operations and kernel launches * @param mr Device memory resource to use for device memory allocation */ void decode_stream_data(std::size_t num_dicts, std::size_t skip_rows, std::size_t row_index_stride, std::size_t level, table_view const& tz_table, cudf::detail::hostdevice_2dvector<gpu::ColumnDesc>& chunks, cudf::detail::device_2dspan<gpu::RowGroup> row_groups, std::vector<column_buffer>& out_buffers, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto const num_stripes = chunks.size().first; auto const num_columns = chunks.size().second; thrust::counting_iterator<int> col_idx_it(0); thrust::counting_iterator<int> stripe_idx_it(0); // Update chunks with pointers to column data std::for_each(stripe_idx_it, stripe_idx_it + num_stripes, [&](auto stripe_idx) { std::for_each(col_idx_it, col_idx_it + num_columns, [&](auto col_idx) { auto& chunk = chunks[stripe_idx][col_idx]; chunk.column_data_base = out_buffers[col_idx].data(); chunk.valid_map_base = out_buffers[col_idx].null_mask(); }); }); // Allocate global dictionary for deserializing rmm::device_uvector<gpu::DictionaryEntry> global_dict(num_dicts, stream); chunks.host_to_device_sync(stream); gpu::DecodeNullsAndStringDictionaries( chunks.base_device_ptr(), global_dict.data(), num_columns, num_stripes, skip_rows, stream); if (level > 0) { // Update nullmasks for children 
if parent was a struct and had null mask update_null_mask(chunks, out_buffers, stream, mr); } auto const tz_table_dptr = table_device_view::create(tz_table, stream); rmm::device_scalar<size_type> error_count(0, stream); // Update the null map for child columns gpu::DecodeOrcColumnData(chunks.base_device_ptr(), global_dict.data(), row_groups, num_columns, num_stripes, skip_rows, *tz_table_dptr, row_groups.size().first, row_index_stride, level, error_count.data(), stream); chunks.device_to_host_async(stream); // `value` synchronizes auto const num_errors = error_count.value(stream); CUDF_EXPECTS(num_errors == 0, "ORC data decode failed"); std::for_each(col_idx_it + 0, col_idx_it + num_columns, [&](auto col_idx) { out_buffers[col_idx].null_count() = std::accumulate(stripe_idx_it + 0, stripe_idx_it + num_stripes, 0, [&](auto null_count, auto const stripe_idx) { return null_count + chunks[stripe_idx][col_idx].null_count; }); }); } /** * @brief Compute the per-stripe prefix sum of null count, for each struct column in the current * layer. */ void scan_null_counts(cudf::detail::hostdevice_2dvector<gpu::ColumnDesc> const& chunks, cudf::host_span<rmm::device_uvector<uint32_t>> prefix_sums, rmm::cuda_stream_view stream) { auto const num_stripes = chunks.size().first; if (num_stripes == 0) return; auto const num_columns = chunks.size().second; std::vector<thrust::pair<size_type, cudf::device_span<uint32_t>>> prefix_sums_to_update; for (auto col_idx = 0ul; col_idx < num_columns; ++col_idx) { // Null counts sums are only needed for children of struct columns if (chunks[0][col_idx].type_kind == STRUCT) { prefix_sums_to_update.emplace_back(col_idx, prefix_sums[col_idx]); } } auto const d_prefix_sums_to_update = cudf::detail::make_device_uvector_async( prefix_sums_to_update, stream, rmm::mr::get_current_device_resource()); thrust::for_each(rmm::exec_policy(stream), d_prefix_sums_to_update.begin(), d_prefix_sums_to_update.end(), [chunks = cudf::detail::device_2dspan<gpu::ColumnDesc const>{chunks}] __device__( auto const& idx_psums) { auto const col_idx = idx_psums.first; auto const psums = idx_psums.second; thrust::transform( thrust::seq, thrust::make_counting_iterator(0), thrust::make_counting_iterator(0) + psums.size(), psums.begin(), [&](auto stripe_idx) { return chunks[stripe_idx][col_idx].null_count; }); thrust::inclusive_scan(thrust::seq, psums.begin(), psums.end(), psums.begin()); }); // `prefix_sums_to_update` goes out of scope, copy has to be done before we return stream.synchronize(); } /** * @brief Aggregate child metadata from parent column chunks. */ void aggregate_child_meta(std::size_t level, cudf::io::orc::detail::column_hierarchy const& selected_columns, cudf::detail::host_2dspan<gpu::ColumnDesc> chunks, cudf::detail::host_2dspan<gpu::RowGroup> row_groups, host_span<orc_column_meta const> list_col, host_span<column_buffer> out_buffers, reader_column_meta& col_meta) { auto const num_of_stripes = chunks.size().first; auto const num_of_rowgroups = row_groups.size().first; auto const num_child_cols = selected_columns.levels[level + 1].size(); auto const number_of_child_chunks = num_child_cols * num_of_stripes; auto& num_child_rows = col_meta.num_child_rows; auto& parent_column_data = col_meta.parent_column_data; // Reset the meta to store child column details. 
num_child_rows.resize(selected_columns.levels[level + 1].size()); std::fill(num_child_rows.begin(), num_child_rows.end(), 0); parent_column_data.resize(number_of_child_chunks); col_meta.parent_column_index.resize(number_of_child_chunks); col_meta.child_start_row.resize(number_of_child_chunks); col_meta.num_child_rows_per_stripe.resize(number_of_child_chunks); col_meta.rwgrp_meta.resize(num_of_rowgroups * num_child_cols); auto child_start_row = cudf::detail::host_2dspan<uint32_t>( col_meta.child_start_row.data(), num_of_stripes, num_child_cols); auto num_child_rows_per_stripe = cudf::detail::host_2dspan<uint32_t>( col_meta.num_child_rows_per_stripe.data(), num_of_stripes, num_child_cols); auto rwgrp_meta = cudf::detail::host_2dspan<reader_column_meta::row_group_meta>( col_meta.rwgrp_meta.data(), num_of_rowgroups, num_child_cols); int index = 0; // number of child column processed // For each parent column, update its child column meta for each stripe. std::for_each(list_col.begin(), list_col.end(), [&](auto const p_col) { auto const parent_col_idx = col_meta.orc_col_map[level][p_col.id]; auto start_row = 0; auto processed_row_groups = 0; for (std::size_t stripe_id = 0; stripe_id < num_of_stripes; stripe_id++) { // Aggregate num_rows and start_row from processed parent columns per row groups if (num_of_rowgroups) { auto stripe_num_row_groups = chunks[stripe_id][parent_col_idx].num_rowgroups; auto processed_child_rows = 0; for (std::size_t rowgroup_id = 0; rowgroup_id < stripe_num_row_groups; rowgroup_id++, processed_row_groups++) { auto const child_rows = row_groups[processed_row_groups][parent_col_idx].num_child_rows; for (size_type id = 0; id < p_col.num_children; id++) { auto const child_col_idx = index + id; rwgrp_meta[processed_row_groups][child_col_idx].start_row = processed_child_rows; rwgrp_meta[processed_row_groups][child_col_idx].num_rows = child_rows; } processed_child_rows += child_rows; } } // Aggregate start row, number of rows per chunk and total number of rows in a column auto const child_rows = chunks[stripe_id][parent_col_idx].num_child_rows; for (size_type id = 0; id < p_col.num_children; id++) { auto const child_col_idx = index + id; num_child_rows[child_col_idx] += child_rows; num_child_rows_per_stripe[stripe_id][child_col_idx] = child_rows; // start row could be different for each column when there is nesting at each stripe level child_start_row[stripe_id][child_col_idx] = (stripe_id == 0) ? 0 : start_row; } start_row += child_rows; } // Parent column null mask and null count would be required for child column // to adjust its nullmask. auto type = out_buffers[parent_col_idx].type.id(); auto parent_null_count = static_cast<uint32_t>(out_buffers[parent_col_idx].null_count()); auto parent_valid_map = out_buffers[parent_col_idx].null_mask(); auto num_rows = out_buffers[parent_col_idx].size; for (size_type id = 0; id < p_col.num_children; id++) { auto const child_col_idx = index + id; col_meta.parent_column_index[child_col_idx] = parent_col_idx; if (type == type_id::STRUCT) { parent_column_data[child_col_idx] = {parent_valid_map, parent_null_count}; // Number of rows in child will remain same as parent in case of struct column num_child_rows[child_col_idx] = num_rows; } else { parent_column_data[child_col_idx] = {nullptr, 0}; } } index += p_col.num_children; }); } /** * @brief struct to store buffer data and size of list buffer */ struct list_buffer_data { size_type* data; size_type size; }; // Generates offsets for list buffer from number of elements in a row. 
void generate_offsets_for_list(host_span<list_buffer_data> buff_data, rmm::cuda_stream_view stream) { for (auto& list_data : buff_data) { thrust::exclusive_scan(rmm::exec_policy_nosync(stream), list_data.data, list_data.data + list_data.size, list_data.data); } } /** * @brief Function that translates ORC data kind to cuDF type enum */ constexpr type_id to_cudf_type(orc::TypeKind kind, bool use_np_dtypes, type_id timestamp_type_id, type_id decimal_type_id) { switch (kind) { case orc::BOOLEAN: return type_id::BOOL8; case orc::BYTE: return type_id::INT8; case orc::SHORT: return type_id::INT16; case orc::INT: return type_id::INT32; case orc::LONG: return type_id::INT64; case orc::FLOAT: return type_id::FLOAT32; case orc::DOUBLE: return type_id::FLOAT64; case orc::STRING: case orc::BINARY: case orc::VARCHAR: case orc::CHAR: // Variable-length types can all be mapped to STRING return type_id::STRING; case orc::TIMESTAMP: return (timestamp_type_id != type_id::EMPTY) ? timestamp_type_id : type_id::TIMESTAMP_NANOSECONDS; case orc::DATE: // There isn't a (DAYS -> np.dtype) mapping return (use_np_dtypes) ? type_id::TIMESTAMP_MILLISECONDS : type_id::TIMESTAMP_DAYS; case orc::DECIMAL: return decimal_type_id; // Need to update once cuDF plans to support map type case orc::MAP: case orc::LIST: return type_id::LIST; case orc::STRUCT: return type_id::STRUCT; default: break; } return type_id::EMPTY; } /** * @brief Determines cuDF type of an ORC Decimal column. */ type_id to_cudf_decimal_type(host_span<std::string const> decimal128_columns, cudf::io::orc::detail::aggregate_orc_metadata const& metadata, int column_index) { if (metadata.get_col_type(column_index).kind != DECIMAL) { return type_id::EMPTY; } if (std::find(decimal128_columns.begin(), decimal128_columns.end(), metadata.column_path(0, column_index)) != decimal128_columns.end()) { return type_id::DECIMAL128; } auto const precision = metadata.get_col_type(column_index) .precision.value_or(cuda::std::numeric_limits<int64_t>::digits10); if (precision <= cuda::std::numeric_limits<int32_t>::digits10) { return type_id::DECIMAL32; } if (precision <= cuda::std::numeric_limits<int64_t>::digits10) { return type_id::DECIMAL64; } return type_id::DECIMAL128; } std::string get_map_child_col_name(std::size_t const idx) { return (idx == 0) ? "key" : "value"; } /** * @brief Create empty columns and respective schema information from the buffer. 
*/ std::unique_ptr<column> create_empty_column( size_type orc_col_id, cudf::io::orc::detail::aggregate_orc_metadata const& metadata, host_span<std::string const> decimal128_columns, bool use_np_dtypes, data_type timestamp_type, column_name_info& schema_info, rmm::cuda_stream_view stream) { schema_info.name = metadata.column_name(0, orc_col_id); auto const kind = metadata.get_col_type(orc_col_id).kind; auto const type = to_cudf_type(kind, use_np_dtypes, timestamp_type.id(), to_cudf_decimal_type(decimal128_columns, metadata, orc_col_id)); switch (kind) { case orc::LIST: { schema_info.children.emplace_back("offsets"); schema_info.children.emplace_back(""); return make_lists_column(0, make_empty_column(type_id::INT32), create_empty_column(metadata.get_col_type(orc_col_id).subtypes[0], metadata, decimal128_columns, use_np_dtypes, timestamp_type, schema_info.children.back(), stream), 0, rmm::device_buffer{0, stream}, stream); } case orc::MAP: { schema_info.children.emplace_back("offsets"); schema_info.children.emplace_back("struct"); auto const child_column_ids = metadata.get_col_type(orc_col_id).subtypes; auto& children_schema = schema_info.children.back().children; std::vector<std::unique_ptr<column>> child_columns; for (std::size_t idx = 0; idx < metadata.get_col_type(orc_col_id).subtypes.size(); idx++) { children_schema.emplace_back(""); child_columns.push_back(create_empty_column(child_column_ids[idx], metadata, decimal128_columns, use_np_dtypes, timestamp_type, schema_info.children.back().children.back(), stream)); children_schema[idx].name = get_map_child_col_name(idx); } return make_lists_column( 0, make_empty_column(type_id::INT32), make_structs_column(0, std::move(child_columns), 0, rmm::device_buffer{0, stream}, stream), 0, rmm::device_buffer{0, stream}, stream); } case orc::STRUCT: { std::vector<std::unique_ptr<column>> child_columns; for (auto const col : metadata.get_col_type(orc_col_id).subtypes) { schema_info.children.emplace_back(""); child_columns.push_back(create_empty_column(col, metadata, decimal128_columns, use_np_dtypes, timestamp_type, schema_info.children.back(), stream)); } return make_structs_column( 0, std::move(child_columns), 0, rmm::device_buffer{0, stream}, stream); } case orc::DECIMAL: { int32_t scale = 0; if (type == type_id::DECIMAL32 or type == type_id::DECIMAL64 or type == type_id::DECIMAL128) { scale = -static_cast<int32_t>(metadata.get_types()[orc_col_id].scale.value_or(0)); } return make_empty_column(data_type(type, scale)); } default: return make_empty_column(type); } } /** * @brief Assemble the buffer with child columns. 
*/ column_buffer assemble_buffer(size_type orc_col_id, std::size_t level, reader_column_meta const& col_meta, cudf::io::orc::detail::aggregate_orc_metadata const& metadata, cudf::io::orc::detail::column_hierarchy const& selected_columns, std::vector<std::vector<column_buffer>>& col_buffers, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { auto const col_id = col_meta.orc_col_map[level][orc_col_id]; auto& col_buffer = col_buffers[level][col_id]; col_buffer.name = metadata.column_name(0, orc_col_id); auto kind = metadata.get_col_type(orc_col_id).kind; switch (kind) { case orc::LIST: case orc::STRUCT: { auto const& children_indices = selected_columns.children.at(orc_col_id); for (auto const child_id : children_indices) { col_buffer.children.emplace_back(assemble_buffer( child_id, level + 1, col_meta, metadata, selected_columns, col_buffers, stream, mr)); } } break; case orc::MAP: { std::vector<column_buffer> child_col_buffers; // Get child buffers auto const& children_indices = selected_columns.children.at(orc_col_id); for (std::size_t idx = 0; idx < children_indices.size(); idx++) { auto const col = children_indices[idx]; child_col_buffers.emplace_back(assemble_buffer( col, level + 1, col_meta, metadata, selected_columns, col_buffers, stream, mr)); child_col_buffers.back().name = get_map_child_col_name(idx); } // Create a struct buffer auto num_rows = child_col_buffers[0].size; auto struct_buffer = column_buffer(cudf::data_type(type_id::STRUCT), num_rows, false, stream, mr); struct_buffer.children = std::move(child_col_buffers); struct_buffer.name = "struct"; col_buffer.children.emplace_back(std::move(struct_buffer)); } break; default: break; } return std::move(col_buffer); } } // namespace reader::impl::impl(std::vector<std::unique_ptr<datasource>>&& sources, orc_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) : _stream(stream), _mr(mr), _sources(std::move(sources)), _metadata{_sources, stream}, _selected_columns{_metadata.select_columns(options.get_columns())}, _timestamp_type{options.get_timestamp_type()}, _use_index{options.is_enabled_use_index()}, _use_np_dtypes{options.is_enabled_use_np_dtypes()}, _decimal128_columns{options.get_decimal128_columns()}, _col_meta{std::make_unique<reader_column_meta>()} { } table_with_metadata reader::impl::read(uint64_t skip_rows, std::optional<size_type> const& num_rows_opt, std::vector<std::vector<size_type>> const& stripes) { // Selected columns at different levels of nesting are stored in different elements // of `selected_columns`; thus, size == 1 means no nested columns CUDF_EXPECTS(skip_rows == 0 or _selected_columns.num_levels() == 1, "skip_rows is not supported by nested columns"); // There are no columns in the table if (_selected_columns.num_levels() == 0) { return {std::make_unique<table>(), table_metadata{}}; } std::vector<std::vector<column_buffer>> out_buffers(_selected_columns.num_levels()); std::vector<std::unique_ptr<column>> out_columns; table_metadata out_metadata; // Copy user data to the output metadata. 
std::transform(_metadata.per_file_metadata.cbegin(), _metadata.per_file_metadata.cend(), std::back_inserter(out_metadata.per_file_user_data), [](auto& meta) { std::unordered_map<std::string, std::string> kv_map; std::transform(meta.ff.metadata.cbegin(), meta.ff.metadata.cend(), std::inserter(kv_map, kv_map.end()), [](auto const& kv) { return std::pair{kv.name, kv.value}; }); return kv_map; }); out_metadata.user_data = {out_metadata.per_file_user_data[0].begin(), out_metadata.per_file_user_data[0].end()}; // Select only stripes required (aka row groups) auto const [rows_to_skip, rows_to_read, selected_stripes] = _metadata.select_stripes(stripes, skip_rows, num_rows_opt, _stream); // If no rows or stripes to read, return empty columns if (rows_to_read == 0 || selected_stripes.empty()) { std::transform(_selected_columns.levels[0].begin(), _selected_columns.levels[0].end(), std::back_inserter(out_columns), [&](auto const col_meta) { out_metadata.schema_info.emplace_back(""); return create_empty_column(col_meta.id, _metadata, _decimal128_columns, _use_np_dtypes, _timestamp_type, out_metadata.schema_info.back(), _stream); }); return {std::make_unique<table>(std::move(out_columns)), std::move(out_metadata)}; } // Set up table for converting timestamp columns from local to UTC time auto const tz_table = [&, &selected_stripes = selected_stripes] { auto const has_timestamp_column = std::any_of( _selected_columns.levels.cbegin(), _selected_columns.levels.cend(), [&](auto const& col_lvl) { return std::any_of(col_lvl.cbegin(), col_lvl.cend(), [&](auto const& col_meta) { return _metadata.get_col_type(col_meta.id).kind == TypeKind::TIMESTAMP; }); }); return has_timestamp_column ? cudf::detail::make_timezone_transition_table( {}, selected_stripes[0].stripe_info[0].second->writerTimezone, _stream) : std::make_unique<cudf::table>(); }(); std::vector<std::vector<rmm::device_buffer>> lvl_stripe_data(_selected_columns.num_levels()); std::vector<std::vector<rmm::device_uvector<uint32_t>>> null_count_prefix_sums; // Iterates through levels of nested columns, child column will be one level down // compared to parent column. auto& col_meta = *_col_meta; for (std::size_t level = 0; level < _selected_columns.num_levels(); ++level) { auto& columns_level = _selected_columns.levels[level]; // Association between each ORC column and its cudf::column col_meta.orc_col_map.emplace_back(_metadata.get_num_cols(), -1); std::vector<orc_column_meta> nested_col; // Get a list of column data types std::vector<data_type> column_types; for (auto& col : columns_level) { auto col_type = to_cudf_type(_metadata.get_col_type(col.id).kind, _use_np_dtypes, _timestamp_type.id(), to_cudf_decimal_type(_decimal128_columns, _metadata, col.id)); CUDF_EXPECTS(col_type != type_id::EMPTY, "Unknown type"); if (col_type == type_id::DECIMAL32 or col_type == type_id::DECIMAL64 or col_type == type_id::DECIMAL128) { // sign of the scale is changed since cuDF follows c++ libraries like CNL // which uses negative scaling, but liborc and other libraries // follow positive scaling. auto const scale = -static_cast<size_type>(_metadata.get_col_type(col.id).scale.value_or(0)); column_types.emplace_back(col_type, scale); } else { column_types.emplace_back(col_type); } // Map each ORC column to its column col_meta.orc_col_map[level][col.id] = column_types.size() - 1; if (col_type == type_id::LIST or col_type == type_id::STRUCT) { nested_col.emplace_back(col); } } // Get the total number of stripes across all input files. 
std::size_t total_num_stripes = std::accumulate(selected_stripes.begin(), selected_stripes.end(), 0, [](std::size_t sum, auto& stripe_source_mapping) { return sum + stripe_source_mapping.stripe_info.size(); }); auto const num_columns = columns_level.size(); cudf::detail::hostdevice_2dvector<gpu::ColumnDesc> chunks( total_num_stripes, num_columns, _stream); memset(chunks.base_host_ptr(), 0, chunks.size_bytes()); const bool use_index = _use_index && // Do stripes have row group index _metadata.is_row_grp_idx_present() && // Only use if we don't have much work with complete columns & stripes // TODO: Consider nrows, gpu, and tune the threshold (rows_to_read > _metadata.get_row_index_stride() && !(_metadata.get_row_index_stride() & 7) && _metadata.get_row_index_stride() > 0 && num_columns * total_num_stripes < 8 * 128) && // Only use if first row is aligned to a stripe boundary // TODO: Fix logic to handle unaligned rows (rows_to_skip == 0); // Logically view streams as columns std::vector<orc_stream_info> stream_info; null_count_prefix_sums.emplace_back(); null_count_prefix_sums.back().reserve(_selected_columns.levels[level].size()); std::generate_n(std::back_inserter(null_count_prefix_sums.back()), _selected_columns.levels[level].size(), [&]() { return cudf::detail::make_zeroed_device_uvector_async<uint32_t>( total_num_stripes, _stream, rmm::mr::get_current_device_resource()); }); // Tracker for eventually deallocating compressed and uncompressed data auto& stripe_data = lvl_stripe_data[level]; std::size_t stripe_start_row = 0; std::size_t num_dict_entries = 0; std::size_t num_rowgroups = 0; int stripe_idx = 0; std::vector<std::pair<std::future<std::size_t>, std::size_t>> read_tasks; for (auto const& stripe_source_mapping : selected_stripes) { // Iterate through the source files selected stripes for (auto const& stripe : stripe_source_mapping.stripe_info) { auto const stripe_info = stripe.first; auto const stripe_footer = stripe.second; auto stream_count = stream_info.size(); auto const total_data_size = gather_stream_info(stripe_idx, stripe_info, stripe_footer, col_meta.orc_col_map[level], _metadata.get_types(), use_index, level == 0, &num_dict_entries, stream_info, chunks); auto const is_stripe_data_empty = total_data_size == 0; CUDF_EXPECTS(not is_stripe_data_empty or stripe_info->indexLength == 0, "Invalid index rowgroup stream data"); // Buffer needs to be padded. // Required by `copy_uncompressed_kernel`. 
stripe_data.emplace_back( cudf::util::round_up_safe(total_data_size, BUFFER_PADDING_MULTIPLE), _stream); auto dst_base = static_cast<uint8_t*>(stripe_data.back().data()); // Coalesce consecutive streams into one read while (not is_stripe_data_empty and stream_count < stream_info.size()) { auto const d_dst = dst_base + stream_info[stream_count].dst_pos; auto const offset = stream_info[stream_count].offset; auto len = stream_info[stream_count].length; stream_count++; while (stream_count < stream_info.size() && stream_info[stream_count].offset == offset + len) { len += stream_info[stream_count].length; stream_count++; } if (_metadata.per_file_metadata[stripe_source_mapping.source_idx] .source->is_device_read_preferred(len)) { read_tasks.push_back( std::pair(_metadata.per_file_metadata[stripe_source_mapping.source_idx] .source->device_read_async(offset, len, d_dst, _stream), len)); } else { auto const buffer = _metadata.per_file_metadata[stripe_source_mapping.source_idx].source->host_read( offset, len); CUDF_EXPECTS(buffer->size() == len, "Unexpected discrepancy in bytes read."); CUDF_CUDA_TRY( cudaMemcpyAsync(d_dst, buffer->data(), len, cudaMemcpyDefault, _stream.value())); _stream.synchronize(); } } auto const num_rows_per_stripe = stripe_info->numberOfRows; auto const rowgroup_id = num_rowgroups; auto stripe_num_rowgroups = 0; if (use_index) { stripe_num_rowgroups = (num_rows_per_stripe + _metadata.get_row_index_stride() - 1) / _metadata.get_row_index_stride(); } // Update chunks to reference streams pointers for (std::size_t col_idx = 0; col_idx < num_columns; col_idx++) { auto& chunk = chunks[stripe_idx][col_idx]; // start row, number of rows in a each stripe and total number of rows // may change in lower levels of nesting chunk.start_row = (level == 0) ? stripe_start_row : col_meta.child_start_row[stripe_idx * num_columns + col_idx]; chunk.num_rows = (level == 0) ? stripe_info->numberOfRows : col_meta.num_child_rows_per_stripe[stripe_idx * num_columns + col_idx]; chunk.column_num_rows = (level == 0) ? rows_to_read : col_meta.num_child_rows[col_idx]; chunk.parent_validity_info = (level == 0) ? column_validity_info{} : col_meta.parent_column_data[col_idx]; chunk.parent_null_count_prefix_sums = (level == 0) ? nullptr : null_count_prefix_sums[level - 1][col_meta.parent_column_index[col_idx]].data(); chunk.encoding_kind = stripe_footer->columns[columns_level[col_idx].id].kind; chunk.type_kind = _metadata.per_file_metadata[stripe_source_mapping.source_idx] .ff.types[columns_level[col_idx].id] .kind; // num_child_rows for a struct column will be same, for other nested types it will be // calculated. chunk.num_child_rows = (chunk.type_kind != orc::STRUCT) ? 0 : chunk.num_rows; chunk.dtype_id = column_types[col_idx].id(); chunk.decimal_scale = _metadata.per_file_metadata[stripe_source_mapping.source_idx] .ff.types[columns_level[col_idx].id] .scale.value_or(0); chunk.rowgroup_id = rowgroup_id; chunk.dtype_len = (column_types[col_idx].id() == type_id::STRING) ? sizeof(string_index_pair) : ((column_types[col_idx].id() == type_id::LIST) or (column_types[col_idx].id() == type_id::STRUCT)) ? 
sizeof(size_type) : cudf::size_of(column_types[col_idx]); chunk.num_rowgroups = stripe_num_rowgroups; if (chunk.type_kind == orc::TIMESTAMP) { chunk.timestamp_type_id = _timestamp_type.id(); } if (not is_stripe_data_empty) { for (int k = 0; k < gpu::CI_NUM_STREAMS; k++) { chunk.streams[k] = dst_base + stream_info[chunk.strm_id[k]].dst_pos; } } } stripe_start_row += num_rows_per_stripe; num_rowgroups += stripe_num_rowgroups; stripe_idx++; } } for (auto& task : read_tasks) { CUDF_EXPECTS(task.first.get() == task.second, "Unexpected discrepancy in bytes read."); } if (stripe_data.empty()) { continue; } // Process dataset chunk pages into output columns auto row_groups = cudf::detail::hostdevice_2dvector<gpu::RowGroup>(num_rowgroups, num_columns, _stream); if (level > 0 and row_groups.size().first) { cudf::host_span<gpu::RowGroup> row_groups_span(row_groups.base_host_ptr(), num_rowgroups * num_columns); auto& rw_grp_meta = col_meta.rwgrp_meta; // Update start row and num rows per row group std::transform(rw_grp_meta.begin(), rw_grp_meta.end(), row_groups_span.begin(), rw_grp_meta.begin(), [&](auto meta, auto& row_grp) { row_grp.num_rows = meta.num_rows; row_grp.start_row = meta.start_row; return meta; }); } // Setup row group descriptors if using indexes if (_metadata.per_file_metadata[0].ps.compression != orc::NONE) { auto decomp_data = decompress_stripe_data(*_metadata.per_file_metadata[0].decompressor, stripe_data, stream_info, chunks, row_groups, total_num_stripes, _metadata.get_row_index_stride(), level == 0, _stream); stripe_data.clear(); stripe_data.push_back(std::move(decomp_data)); } else { if (row_groups.size().first) { chunks.host_to_device_async(_stream); row_groups.host_to_device_async(_stream); gpu::ParseRowGroupIndex(row_groups.base_device_ptr(), nullptr, chunks.base_device_ptr(), num_columns, total_num_stripes, num_rowgroups, _metadata.get_row_index_stride(), level == 0, _stream); } } for (std::size_t i = 0; i < column_types.size(); ++i) { bool is_nullable = false; for (std::size_t j = 0; j < total_num_stripes; ++j) { if (chunks[j][i].strm_len[gpu::CI_PRESENT] != 0) { is_nullable = true; break; } } auto is_list_type = (column_types[i].id() == type_id::LIST); auto n_rows = (level == 0) ? rows_to_read : col_meta.num_child_rows[i]; // For list column, offset column will be always size + 1 if (is_list_type) n_rows++; out_buffers[level].emplace_back(column_types[i], n_rows, is_nullable, _stream, _mr); } decode_stream_data(num_dict_entries, rows_to_skip, _metadata.get_row_index_stride(), level, tz_table->view(), chunks, row_groups, out_buffers[level], _stream, _mr); if (nested_col.size()) { // Extract information to process nested child columns scan_null_counts(chunks, null_count_prefix_sums[level], _stream); row_groups.device_to_host_sync(_stream); aggregate_child_meta( level, _selected_columns, chunks, row_groups, nested_col, out_buffers[level], col_meta); // ORC stores number of elements at each row, so we need to generate offsets from that std::vector<list_buffer_data> buff_data; std::for_each( out_buffers[level].begin(), out_buffers[level].end(), [&buff_data](auto& out_buffer) { if (out_buffer.type.id() == type_id::LIST) { auto data = static_cast<size_type*>(out_buffer.data()); buff_data.emplace_back(list_buffer_data{data, out_buffer.size}); } }); if (not buff_data.empty()) { generate_offsets_for_list(buff_data, _stream); } } } // Create columns from buffer with respective schema information. 
std::transform( _selected_columns.levels[0].begin(), _selected_columns.levels[0].end(), std::back_inserter(out_columns), [&](auto const& orc_col_meta) { out_metadata.schema_info.emplace_back(""); auto col_buffer = assemble_buffer( orc_col_meta.id, 0, col_meta, _metadata, _selected_columns, out_buffers, _stream, _mr); return make_column(col_buffer, &out_metadata.schema_info.back(), std::nullopt, _stream); }); return {std::make_unique<table>(std::move(out_columns)), std::move(out_metadata)}; } // Forward to implementation reader::reader(std::vector<std::unique_ptr<cudf::io::datasource>>&& sources, orc_reader_options const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) : _impl{std::make_unique<impl>(std::move(sources), options, stream, mr)} { } // Destructor within this translation unit reader::~reader() = default; // Forward to implementation table_with_metadata reader::read(orc_reader_options const& options) { return _impl->read(options.get_skip_rows(), options.get_num_rows(), options.get_stripes()); } } // namespace cudf::io::detail::orc
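One step of the reader above is worth isolating: ORC stores LIST columns as per-row element counts, while cudf list columns need offsets, so generate_offsets_for_list() runs an in-place exclusive scan over each list buffer (allocated with one extra trailing slot, so the scan leaves the total child count in the last element). A standalone sketch of that transform:

#include <thrust/device_vector.h>
#include <thrust/scan.h>

#include <cstdio>
#include <vector>

int main()
{
  // Per-row element counts for a 3-row LIST column, plus one trailing slot;
  // the reader sizes list offset buffers as n_rows + 1 for the same reason.
  std::vector<int> const h_counts{3, 0, 2, 0};
  thrust::device_vector<int> buf(h_counts.begin(), h_counts.end());

  // In-place exclusive scan turns counts into offsets; the last element ends
  // up holding the total child count (5 here).
  thrust::exclusive_scan(buf.begin(), buf.end(), buf.begin());

  for (std::size_t i = 0; i < buf.size(); ++i) {
    std::printf("%d ", static_cast<int>(buf[i]));  // prints: 0 3 3 5
  }
  std::printf("\n");
}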
rapidsai_public_repos/cudf/cpp/src/io/orc/orc.hpp
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/column/column_device_view.cuh> #include <cudf/io/datasource.hpp> #include <cudf/io/orc_metadata.hpp> #include <cudf/io/orc_types.hpp> #include <cudf/utilities/error.hpp> #include <io/comp/io_uncomp.hpp> #include <thrust/optional.h> #include <algorithm> #include <cstddef> #include <cstdint> #include <memory> #include <optional> #include <string> #include <vector> namespace cudf { namespace io { namespace orc { static constexpr uint32_t block_header_size = 3; // Seconds from January 1st, 1970 to January 1st, 2015 static constexpr int64_t orc_utc_epoch = 1420070400; // Used for the nanosecond remainder in timestamp statistics when the actual nanoseconds of min/max // are not included. As the timestamp statistics are stored as milliseconds + nanosecond remainder, // the maximum nanosecond remainder is 999,999 (nanoseconds in a millisecond - 1). static constexpr int32_t DEFAULT_MIN_NANOS = 0; static constexpr int32_t DEFAULT_MAX_NANOS = 999'999; struct PostScript { uint64_t footerLength = 0; // the length of the footer section in bytes CompressionKind compression = NONE; // the kind of generic compression used uint32_t compressionBlockSize{}; // the maximum size of each compression chunk std::vector<uint32_t> version; // the version of the writer [major, minor] uint64_t metadataLength = 0; // the length of the metadata section in bytes std::string magic = ""; // the fixed string "ORC" }; struct StripeInformation { uint64_t offset = 0; // the start of the stripe within the file uint64_t indexLength = 0; // the length of the indexes in bytes uint64_t dataLength = 0; // the length of the data in bytes uint32_t footerLength = 0; // the length of the footer in bytes uint32_t numberOfRows = 0; // the number of rows in the stripe }; struct SchemaType { TypeKind kind = INVALID_TYPE_KIND; // the kind of this type std::vector<uint32_t> subtypes; // the type ids of any subcolumns for list, map, struct, or union std::vector<std::string> fieldNames; // the list of field names for struct std::optional<uint32_t> maximumLength; // the maximum length of the type for varchar or char in UTF-8 characters std::optional<uint32_t> precision; // the precision for decimal std::optional<uint32_t> scale; // the scale for decimal }; struct UserMetadataItem { std::string name; // the user defined key std::string value; // the user defined binary value as string }; using ColStatsBlob = std::vector<uint8_t>; // Column statistics blob struct FileFooter { uint64_t headerLength = 0; // the length of the file header in bytes (always 3) uint64_t contentLength = 0; // the length of the file header and body in bytes std::vector<StripeInformation> stripes; // the information about the stripes std::vector<SchemaType> types; // the schema information std::vector<UserMetadataItem> metadata; // the user metadata that was added uint64_t numberOfRows = 0; // the total number of rows in the file std::vector<ColStatsBlob> 
statistics; // Column statistics blobs uint32_t rowIndexStride = 0; // the maximum number of rows in each index entry }; struct Stream { StreamKind kind = INVALID_STREAM_KIND; std::optional<uint32_t> column_id; // ORC column id (different from column index in the table!) uint64_t length = 0; // the number of bytes in the stream // Returns index of the column in the table, if any // Stream of the 'column 0' does not have a corresponding column in the table [[nodiscard]] std::optional<uint32_t> column_index() const noexcept { return column_id.value_or(0) > 0 ? std::optional<uint32_t>{*column_id - 1} : std::optional<uint32_t>{}; } }; struct ColumnEncoding { ColumnEncodingKind kind = INVALID_ENCODING_KIND; uint32_t dictionarySize = 0; // for dictionary encodings, record the size of the dictionary }; struct StripeFooter { std::vector<Stream> streams; // the location of each stream std::vector<ColumnEncoding> columns; // the encoding of each column std::string writerTimezone = ""; // time zone of the writer }; /** * @brief Contains per-column ORC statistics. * * At most one of the `***_statistics` members has a value. */ struct column_statistics { std::optional<uint64_t> number_of_values; std::optional<integer_statistics> int_stats; std::optional<double_statistics> double_stats; std::optional<string_statistics> string_stats; std::optional<bucket_statistics> bucket_stats; std::optional<decimal_statistics> decimal_stats; std::optional<date_statistics> date_stats; std::optional<binary_statistics> binary_stats; std::optional<timestamp_statistics> timestamp_stats; std::optional<bool> has_null; }; struct StripeStatistics { std::vector<ColStatsBlob> colStats; // Column statistics blobs }; struct Metadata { std::vector<StripeStatistics> stripeStats; }; int inline constexpr encode_field_number(int field_number, ProtofType field_type) noexcept { return (field_number * 8) + static_cast<int>(field_type); } namespace { template <typename base_t, std::enable_if_t<!std::is_arithmetic_v<base_t> and !std::is_enum_v<base_t>>* = nullptr> int static constexpr encode_field_number_base(int field_number) noexcept { return encode_field_number(field_number, ProtofType::FIXEDLEN); } template <typename base_t, std::enable_if_t<std::is_integral_v<base_t> or std::is_enum_v<base_t>>* = nullptr> int static constexpr encode_field_number_base(int field_number) noexcept { return encode_field_number(field_number, ProtofType::VARINT); } template <typename base_t, std::enable_if_t<std::is_same_v<base_t, float>>* = nullptr> int static constexpr encode_field_number_base(int field_number) noexcept { return encode_field_number(field_number, ProtofType::FIXED32); } template <typename base_t, std::enable_if_t<std::is_same_v<base_t, double>>* = nullptr> int static constexpr encode_field_number_base(int field_number) noexcept { return encode_field_number(field_number, ProtofType::FIXED64); } }; // namespace template <typename T, std::enable_if_t<!std::is_class_v<T> or std::is_same_v<T, std::string>>* = nullptr> int constexpr encode_field_number(int field_number) noexcept { return encode_field_number_base<T>(field_number); } // containers change the field number encoding template <typename T, std::enable_if_t<std::is_same_v<T, std::vector<typename T::value_type>>>* = nullptr> int constexpr encode_field_number(int field_number) noexcept { return encode_field_number_base<T>(field_number); } // optional fields don't change the field number encoding template <typename T, std::enable_if_t<std::is_same_v<T, std::optional<typename 
T::value_type>>>* = nullptr> int constexpr encode_field_number(int field_number) noexcept { return encode_field_number_base<typename T::value_type>(field_number); } /** * @brief Class for parsing Orc's Protocol Buffers encoded metadata */ class ProtobufReader { public: ProtobufReader(uint8_t const* base, size_t len) : m_base(base), m_cur(base), m_end(base + len) {} template <typename T> void read(T& s) { read(s, m_end - m_cur); } void read(PostScript&, size_t maxlen); void read(FileFooter&, size_t maxlen); void read(StripeInformation&, size_t maxlen); void read(SchemaType&, size_t maxlen); void read(UserMetadataItem&, size_t maxlen); void read(StripeFooter&, size_t maxlen); void read(Stream&, size_t maxlen); void read(ColumnEncoding&, size_t maxlen); void read(integer_statistics&, size_t maxlen); void read(double_statistics&, size_t maxlen); void read(string_statistics&, size_t maxlen); void read(bucket_statistics&, size_t maxlen); void read(decimal_statistics&, size_t maxlen); void read(date_statistics&, size_t maxlen); void read(binary_statistics&, size_t maxlen); void read(timestamp_statistics&, size_t maxlen); void read(column_statistics&, size_t maxlen); void read(StripeStatistics&, size_t maxlen); void read(Metadata&, size_t maxlen); private: template <int index> friend class FunctionSwitchImpl; void skip_bytes(size_t bytecnt) { bytecnt = std::min(bytecnt, (size_t)(m_end - m_cur)); m_cur += bytecnt; } template <typename T> T get(); void skip_struct_field(int t); template <typename T, typename... Operator> void function_builder(T& s, size_t maxlen, std::tuple<Operator...>& op); uint32_t read_field_size(uint8_t const* end); template <typename T, std::enable_if_t<std::is_integral_v<T>>* = nullptr> void read_field(T& value, uint8_t const* end) { value = get<T>(); } template <typename T, std::enable_if_t<std::is_enum_v<T>>* = nullptr> void read_field(T& value, uint8_t const* end) { value = static_cast<T>(get<uint32_t>()); } template <typename T, std::enable_if_t<std::is_same_v<T, std::string>>* = nullptr> void read_field(T& value, uint8_t const* end) { auto const size = read_field_size(end); value.assign(reinterpret_cast<char const*>(m_cur), size); m_cur += size; } template <typename T, std::enable_if_t<std::is_same_v<T, std::vector<std::string>>>* = nullptr> void read_field(T& value, uint8_t const* end) { auto const size = read_field_size(end); value.emplace_back(reinterpret_cast<char const*>(m_cur), size); m_cur += size; } template <typename T, std::enable_if_t<std::is_same_v<T, std::vector<typename T::value_type>> and !std::is_same_v<std::string, typename T::value_type>>* = nullptr> void read_field(T& value, uint8_t const* end) { auto const size = read_field_size(end); value.emplace_back(); read(value.back(), size); } template <typename T, std::enable_if_t<std::is_same_v<T, std::optional<typename T::value_type>>>* = nullptr> void read_field(T& value, uint8_t const* end) { typename T::value_type contained_value; read_field(contained_value, end); value = std::optional<typename T::value_type>{std::move(contained_value)}; } template <typename T> auto read_field(T& value, uint8_t const* end) -> decltype(read(value, 0)) { auto const size = read_field_size(end); read(value, size); } template <typename T, std::enable_if_t<std::is_floating_point_v<T>>* = nullptr> void read_field(T& value, uint8_t const* end) { memcpy(&value, m_cur, sizeof(T)); m_cur += sizeof(T); } template <typename T> void read_packed_field(T& value, uint8_t const* end) { auto const len = get<uint32_t>(); auto const field_end 
= std::min(m_cur + len, end); while (m_cur < field_end) value.push_back(get<typename T::value_type>()); } template <typename T> void read_raw_field(T& value, uint8_t const* end) { auto const size = read_field_size(end); value.emplace_back(m_cur, m_cur + size); m_cur += size; } template <typename T> struct field_reader { int const encoded_field_number; T& output_value; field_reader(int field_number, T& field_value) : encoded_field_number(encode_field_number<T>(field_number)), output_value(field_value) { } inline void operator()(ProtobufReader* pbr, uint8_t const* end) { pbr->read_field(output_value, end); } }; template <typename T> struct packed_field_reader { int const encoded_field_number; T& output_value; packed_field_reader(int field_number, T& field_value) : encoded_field_number(encode_field_number<T>(field_number)), output_value(field_value) { } inline void operator()(ProtobufReader* pbr, uint8_t const* end) { pbr->read_packed_field(output_value, end); } }; template <typename T> struct raw_field_reader { int const encoded_field_number; T& output_value; raw_field_reader(int field_number, T& field_value) : encoded_field_number(encode_field_number<T>(field_number)), output_value(field_value) { } inline void operator()(ProtobufReader* pbr, uint8_t const* end) { pbr->read_raw_field(output_value, end); } }; uint8_t const* const m_base; uint8_t const* m_cur; uint8_t const* const m_end; }; template <> inline uint8_t ProtobufReader::get<uint8_t>() { return (m_cur < m_end) ? *m_cur++ : 0; }; template <> inline bool ProtobufReader::get<bool>() { return static_cast<bool>(get<uint8_t>()); }; template <> inline uint32_t ProtobufReader::get<uint32_t>() { uint32_t v = 0; for (uint32_t l = 0;; l += 7) { uint32_t c = get<uint8_t>(); v |= (c & 0x7f) << l; if (c < 0x80) return v; } } template <> inline uint64_t ProtobufReader::get<uint64_t>() { uint64_t v = 0; for (uint64_t l = 0;; l += 7) { uint64_t c = get<uint8_t>(); v |= (c & 0x7f) << l; if (c < 0x80) return v; } } template <typename T> auto decode_zigzag(T u) { using signed_t = std::make_signed_t<T>; return static_cast<signed_t>((u >> 1u) ^ -static_cast<signed_t>(u & 1)); } template <> inline int32_t ProtobufReader::get<int32_t>() { return decode_zigzag(get<uint32_t>()); } template <> inline int64_t ProtobufReader::get<int64_t>() { return decode_zigzag(get<uint64_t>()); } /** * @brief Class for encoding Orc's metadata with Protocol Buffers */ class ProtobufWriter { public: ProtobufWriter() = default; ProtobufWriter(std::size_t bytes) : m_buff(bytes) {} uint32_t put_byte(uint8_t v) { m_buff.push_back(v); return 1; } template <typename T> uint32_t put_bytes(host_span<T const> values) { static_assert(sizeof(T) == 1); m_buff.reserve(m_buff.size() + values.size()); m_buff.insert(m_buff.end(), values.begin(), values.end()); return values.size(); } uint32_t put_uint(uint64_t v) { int l = 1; while (v > 0x7f) { put_byte(static_cast<uint8_t>(v | 0x80)); v >>= 7; l++; } put_byte(static_cast<uint8_t>(v)); return l; } uint32_t put_int(int64_t v) { int64_t s = (v < 0); return put_uint(((v ^ -s) << 1) + s); } void put_row_index_entry(int32_t present_blk, int32_t present_ofs, int32_t data_blk, int32_t data_ofs, int32_t data2_blk, int32_t data2_ofs, TypeKind kind, ColStatsBlob const* stats); std::size_t size() const { return m_buff.size(); } uint8_t const* data() { return m_buff.data(); } std::vector<uint8_t>& buffer() { return m_buff; } std::vector<uint8_t> release() { return std::move(m_buff); } public: size_t write(PostScript const&); size_t write(FileFooter 
const&); size_t write(StripeInformation const&); size_t write(SchemaType const&); size_t write(UserMetadataItem const&); size_t write(StripeFooter const&); size_t write(Stream const&); size_t write(ColumnEncoding const&); size_t write(StripeStatistics const&); size_t write(Metadata const&); protected: std::vector<uint8_t> m_buff; struct ProtobufFieldWriter; }; /** * @brief Class for decompressing Orc data blocks using the CPU */ class OrcDecompressor { public: OrcDecompressor(CompressionKind kind, uint32_t blockSize); /** * @brief ORC block decompression * * @param src compressed data * @param stream CUDA stream used for device memory operations and kernel launches * * @return decompressed data */ host_span<uint8_t const> decompress_blocks(host_span<uint8_t const> src, rmm::cuda_stream_view stream); [[nodiscard]] uint32_t GetLog2MaxCompressionRatio() const { return m_log2MaxRatio; } [[nodiscard]] uint32_t GetMaxUncompressedBlockSize(uint32_t block_len) const { return std::min(block_len << m_log2MaxRatio, m_blockSize); } [[nodiscard]] compression_type compression() const { return _compression; } [[nodiscard]] uint32_t GetBlockSize() const { return m_blockSize; } protected: compression_type _compression; uint32_t m_log2MaxRatio = 24; // log2 of maximum compression ratio uint32_t m_blockSize; std::vector<uint8_t> m_buf; }; /** * @brief Stores orc id for each column and number of children in that column. * * @code{.pseudo} * Consider following data where a struct has two members and a list column * {"struct": [{"a": 1, "b": 2}, {"a":3, "b":5}], "list":[[1, 2], [2, 3]]} * * `orc_column_meta` for struct column would be * id = 0 * num_children = 2 * * `orc_column_meta` for list column would be * id = 3 * num_children = 1 * @endcode * */ struct orc_column_meta { size_type id; // orc id for the column size_type num_children; // number of children at the same level of nesting in case of struct }; /** * @brief Stores column's validity map and null count */ struct column_validity_info { uint32_t* valid_map_base; uint32_t null_count; }; /** * @brief A helper class for ORC file metadata. Provides some additional * convenience methods for initializing and accessing metadata. */ class metadata { using OrcStripeInfo = std::pair<StripeInformation const*, StripeFooter const*>; public: struct stripe_source_mapping { int source_idx; std::vector<OrcStripeInfo> stripe_info; }; public: explicit metadata(datasource* const src, rmm::cuda_stream_view stream); [[nodiscard]] size_t get_total_rows() const { return ff.numberOfRows; } [[nodiscard]] int get_num_stripes() const { return ff.stripes.size(); } [[nodiscard]] int get_num_columns() const { return ff.types.size(); } /** * @brief Returns the name of the column with the given ID. * * Name might not be unique in the ORC file, since columns with different parents are allowed to * have the same names. */ [[nodiscard]] std::string const& column_name(size_type column_id) const { CUDF_EXPECTS(column_id < get_num_columns(), "Out of range column id provided"); return column_names[column_id]; } /** * @brief Returns the full name of the column with the given ID - includes the ancestor columns * names. * * Each column in the ORC file has a unique path. 
*/ [[nodiscard]] std::string const& column_path(size_type column_id) const { CUDF_EXPECTS(column_id < get_num_columns(), "Out of range column id provided"); return column_paths[column_id]; } [[nodiscard]] int get_row_index_stride() const { return ff.rowIndexStride; } /** * @brief Returns the ID of the parent column of the given column. */ [[nodiscard]] size_type parent_id(size_type column_id) const { return parents.at(column_id).value().id; } /** * @brief Returns the index the given column has in its parent's children list. */ [[nodiscard]] size_type field_index(size_type column_id) const { return parents.at(column_id).value().field_idx; } /** * @brief Returns whether the given column has a parent. */ [[nodiscard]] size_type column_has_parent(size_type column_id) const { return parents.at(column_id).has_value(); } public: PostScript ps; FileFooter ff; Metadata md; std::vector<StripeFooter> stripefooters; std::unique_ptr<OrcDecompressor> decompressor; datasource* const source; private: struct column_parent { // parent's ID size_type id; // Index of this column in the parent's list of children size_type field_idx; column_parent(size_type parent_id, size_type field_idx) : id{parent_id}, field_idx{field_idx} {} }; void init_parent_descriptors(); std::vector<std::optional<column_parent>> parents; void init_column_names(); std::vector<std::string> column_names; std::vector<std::string> column_paths; }; /** * @brief `column_device_view` and additional, ORC specific, information on the column. */ struct orc_column_device_view : public column_device_view { __device__ orc_column_device_view(column_device_view col, thrust::optional<uint32_t> parent_idx) : column_device_view{col}, parent_index{parent_idx} { } thrust::optional<uint32_t> parent_index; bitmask_type const* pushdown_mask = nullptr; }; /** * @brief Range of rows within a single rowgroup. */ struct rowgroup_rows { size_type begin; size_type end; [[nodiscard]] constexpr auto size() const noexcept { return end - begin; } }; } // namespace orc } // namespace io } // namespace cudf
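// --- Illustrative sketch (not part of orc.hpp) ------------------------------
// How the varint machinery above behaves on the wire: ProtobufWriter::put_uint
// emits 7 payload bits per byte with the top bit as a continuation flag, and
// put_int first zigzag-maps signed values so small negatives stay short;
// ProtobufReader::get<int64_t>() reverses both steps via decode_zigzag.
// Host-only toy round-trip for exposition; assumes <cassert>, <cstdint> and
// <vector> are available.
inline void toy_varint_roundtrip()
{
  auto put_uint = [](std::vector<uint8_t>& buf, uint64_t v) {
    while (v > 0x7f) {
      buf.push_back(static_cast<uint8_t>(v | 0x80));  // low 7 bits + continuation bit
      v >>= 7;
    }
    buf.push_back(static_cast<uint8_t>(v));
  };
  auto get_uint = [](uint8_t const*& p) {
    uint64_t v = 0;
    for (uint64_t l = 0;; l += 7) {
      uint64_t const c = *p++;
      v |= (c & 0x7f) << l;
      if (c < 0x80) return v;
    }
  };

  std::vector<uint8_t> buf;
  int64_t const value = -3;
  int64_t const s     = value < 0;
  // zigzag as in ProtobufWriter::put_int: -3 maps to 5, a single wire byte
  put_uint(buf, ((value ^ -s) << 1) + s);
  assert(buf.size() == 1 && buf[0] == 0x05);

  uint8_t const* p = buf.data();
  assert(cudf::io::orc::decode_zigzag(get_uint(p)) == value);
}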
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/orc/stats_enc.cu
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "orc_gpu.hpp" #include <io/utilities/block_utils.cuh> #include <cudf/io/orc_types.hpp> #include <cudf/strings/detail/convert/fixed_point_to_string.cuh> #include <rmm/cuda_stream_view.hpp> namespace cudf::io::orc::gpu { using strings::detail::fixed_point_string_size; // Nanosecond statistics should not be enabled until the spec version is set correctly in the output // files. See https://github.com/rapidsai/cudf/issues/14325 for more details constexpr bool enable_nanosecond_statistics = false; constexpr unsigned int init_threads_per_group = 32; constexpr unsigned int init_groups_per_block = 4; constexpr unsigned int init_threads_per_block = init_threads_per_group * init_groups_per_block; __global__ void __launch_bounds__(init_threads_per_block) gpu_init_statistics_groups(statistics_group* groups, stats_column_desc const* cols, device_2dspan<rowgroup_rows const> rowgroup_bounds) { __shared__ __align__(4) statistics_group group_g[init_groups_per_block]; auto const col_id = blockIdx.y; auto const chunk_id = (blockIdx.x * init_groups_per_block) + threadIdx.y; auto const t = threadIdx.x; auto const num_rowgroups = rowgroup_bounds.size().first; statistics_group* group = &group_g[threadIdx.y]; if (chunk_id < num_rowgroups and t == 0) { group->col = &cols[col_id]; group->start_row = rowgroup_bounds[chunk_id][col_id].begin; group->num_rows = rowgroup_bounds[chunk_id][col_id].size(); groups[col_id * num_rowgroups + chunk_id] = *group; } } /** * @brief Get the buffer size and offsets of encoded statistics * * @param[in,out] groups Statistics merge groups * @param[in] statistics_count Number of statistics buffers */ constexpr unsigned int buffersize_reduction_dim = 32; constexpr unsigned int block_size = buffersize_reduction_dim * buffersize_reduction_dim; constexpr unsigned int pb_fld_hdrlen = 1; constexpr unsigned int pb_fld_hdrlen32 = 5; constexpr unsigned int pb_fldlen_int32 = 5; constexpr unsigned int pb_fldlen_int64 = 10; constexpr unsigned int pb_fldlen_float64 = 8; constexpr unsigned int pb_fldlen_bucket1 = 1 + pb_fldlen_int64; // statistics field number + number of values + has null constexpr unsigned int pb_fldlen_common = pb_fld_hdrlen + (pb_fld_hdrlen + pb_fldlen_int64) + 2 * pb_fld_hdrlen; template <unsigned int block_size> __global__ void __launch_bounds__(block_size, 1) gpu_init_statistics_buffersize(statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count) { using block_scan = cub::BlockScan<uint32_t, block_size, cub::BLOCK_SCAN_WARP_SCANS>; __shared__ typename block_scan::TempStorage temp_storage; uint32_t stats_size = 0; auto t = threadIdx.x; __syncthreads(); for (thread_index_type start = 0; start < statistics_count; start += block_size) { uint32_t stats_len = 0, stats_pos; auto idx = start + t; if (idx < statistics_count) { statistics_dtype const dtype = groups[idx].stats_dtype; switch (dtype) { case dtype_bool: stats_len = pb_fldlen_common + pb_fld_hdrlen + 
pb_fldlen_bucket1; break; case dtype_int8: case dtype_int16: case dtype_int32: case dtype_int64: stats_len = pb_fldlen_common + pb_fld_hdrlen + 3 * (pb_fld_hdrlen + pb_fldlen_int64); break; case dtype_date32: stats_len = pb_fldlen_common + pb_fld_hdrlen + 2 * (pb_fld_hdrlen + pb_fldlen_int64); break; case dtype_timestamp64: stats_len = pb_fldlen_common + pb_fld_hdrlen + 4 * (pb_fld_hdrlen + pb_fldlen_int64); if constexpr (enable_nanosecond_statistics) { stats_len += 2 * (pb_fld_hdrlen + pb_fldlen_int32); } break; case dtype_float32: case dtype_float64: stats_len = pb_fldlen_common + pb_fld_hdrlen + 3 * (pb_fld_hdrlen + pb_fldlen_float64); break; case dtype_decimal64: case dtype_decimal128: { auto const scale = groups[idx].col_dtype.scale(); auto const min_size = fixed_point_string_size(chunks[idx].min_value.d128_val, scale); auto const max_size = fixed_point_string_size(chunks[idx].max_value.d128_val, scale); auto const sum_size = fixed_point_string_size(chunks[idx].sum.d128_val, scale); // common + total field length + encoded string lengths + strings stats_len = pb_fldlen_common + pb_fld_hdrlen32 + 3 * (pb_fld_hdrlen + pb_fld_hdrlen32) + min_size + max_size + sum_size; } break; case dtype_string: stats_len = pb_fldlen_common + pb_fld_hdrlen32 + 3 * (pb_fld_hdrlen + pb_fld_hdrlen32) + chunks[idx].min_value.str_val.length + chunks[idx].max_value.str_val.length; break; case dtype_none: stats_len = pb_fldlen_common; default: break; } } uint32_t tmp_stats_size; block_scan(temp_storage).ExclusiveSum(stats_len, stats_pos, tmp_stats_size); stats_pos += stats_size; stats_size += tmp_stats_size; if (idx < statistics_count) { groups[idx].start_chunk = stats_pos; groups[idx].num_chunks = stats_len; } __syncthreads(); } } struct stats_state_s { uint8_t* base{}; ///< Output buffer start uint8_t* end{}; ///< Output buffer end statistics_chunk chunk{}; statistics_merge_group group{}; statistics_dtype stats_dtype{}; //!< Statistics data type for this column }; /* * Protobuf encoding - see * https://developers.google.com/protocol-buffers/docs/encoding */ // Protobuf varint encoding for unsigned int __device__ inline uint8_t* pb_encode_uint(uint8_t* p, uint64_t v) { while (v > 0x7f) { *p++ = ((uint32_t)v | 0x80); v >>= 7; } *p++ = v; return p; } // Protobuf field encoding for unsigned int __device__ inline uint8_t* pb_put_uint(uint8_t* p, uint32_t id, uint64_t v) { p[0] = id * 8 + static_cast<ProtofType>(ProtofType::VARINT); // NOTE: Assumes id < 16 return pb_encode_uint(p + 1, v); } // Protobuf field encoding for signed int __device__ inline uint8_t* pb_put_int(uint8_t* p, uint32_t id, int64_t v) { int64_t s = (v < 0); return pb_put_uint(p, id, (v ^ -s) * 2 + s); } // Protobuf field encoding for 'packed' unsigned int (single value) __device__ inline uint8_t* pb_put_packed_uint(uint8_t* p, uint32_t id, uint64_t v) { uint8_t* p2 = pb_encode_uint(p + 2, v); p[0] = id * 8 + ProtofType::FIXEDLEN; p[1] = static_cast<uint8_t>(p2 - (p + 2)); return p2; } // Protobuf field encoding for binary/string __device__ inline uint8_t* pb_put_binary(uint8_t* p, uint32_t id, void const* bytes, uint32_t len) { p[0] = id * 8 + ProtofType::FIXEDLEN; p = pb_encode_uint(p + 1, len); memcpy(p, bytes, len); return p + len; } __device__ inline uint8_t* pb_put_decimal( uint8_t* p, uint32_t id, __int128_t value, int32_t scale, int32_t len) { p[0] = id * 8 + ProtofType::FIXEDLEN; p = pb_encode_uint(p + 1, len); strings::detail::fixed_point_to_string(value, scale, reinterpret_cast<char*>(p)); return p + len; } // Protobuf field encoding 
for 64-bit raw encoding (double) __device__ inline uint8_t* pb_put_fixed64(uint8_t* p, uint32_t id, void const* raw64) { p[0] = id * 8 + ProtofType::FIXED64; memcpy(p + 1, raw64, 8); return p + 9; } // Splits a nanosecond timestamp into milliseconds and nanoseconds __device__ std::pair<int64_t, int32_t> split_nanosecond_timestamp(int64_t nano_count) { auto const ns = cuda::std::chrono::nanoseconds(nano_count); auto const ms_floor = cuda::std::chrono::floor<cuda::std::chrono::milliseconds>(ns); auto const ns_remainder = ns - ms_floor; return {ms_floor.count(), ns_remainder.count()}; } /** * @brief Encode statistics in ORC protobuf format * * @param[in,out] groups Statistics merge groups * @param[in,out] chunks Statistics data * @param[in] statistics_count Number of statistics buffers * * ORC statistics format from https://orc.apache.org/specification/ORCv1/ * * message ColumnStatistics { * // the number of values * optional uint64 numberOfValues = 1; * // At most one of these has a value for any column * optional IntegerStatistics intStatistics = 2; * optional DoubleStatistics doubleStatistics = 3; * optional StringStatistics stringStatistics = 4; * optional BucketStatistics bucketStatistics = 5; * optional DecimalStatistics decimalStatistics = 6; * optional DateStatistics dateStatistics = 7; * optional BinaryStatistics binaryStatistics = 8; * optional TimestampStatistics timestampStatistics = 9; * optional bool hasNull = 10; * } */ constexpr unsigned int encode_threads_per_chunk = 32; constexpr unsigned int encode_chunks_per_block = 4; constexpr unsigned int encode_threads_per_block = encode_threads_per_chunk * encode_chunks_per_block; __global__ void __launch_bounds__(encode_threads_per_block) gpu_encode_statistics(uint8_t* blob_bfr, statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count) { __shared__ __align__(8) stats_state_s state_g[encode_chunks_per_block]; auto t = threadIdx.x; auto idx = blockIdx.x * encode_chunks_per_block + threadIdx.y; stats_state_s* const s = &state_g[threadIdx.y]; // Encode and update actual bfr size if (idx < statistics_count && t == 0) { s->chunk = chunks[idx]; s->group = groups[idx]; s->stats_dtype = s->group.stats_dtype; s->base = blob_bfr + s->group.start_chunk; s->end = blob_bfr + s->group.start_chunk + s->group.num_chunks; uint8_t* cur = pb_put_uint(s->base, 1, s->chunk.non_nulls); cur = pb_put_uint(cur, 10, s->chunk.null_count != 0); // hasNull (bool) uint8_t* fld_start = cur; switch (s->stats_dtype) { case dtype_int8: case dtype_int16: case dtype_int32: case dtype_int64: // intStatistics = 2 // message IntegerStatistics { // optional sint64 minimum = 1; // optional sint64 maximum = 2; // optional sint64 sum = 3; // } if (s->chunk.has_minmax || s->chunk.has_sum) { *cur = 2 * 8 + ProtofType::FIXEDLEN; cur += 2; if (s->chunk.has_minmax) { cur = pb_put_int(cur, 1, s->chunk.min_value.i_val); cur = pb_put_int(cur, 2, s->chunk.max_value.i_val); } if (s->chunk.has_sum) { cur = pb_put_int(cur, 3, s->chunk.sum.i_val); } fld_start[1] = cur - (fld_start + 2); } break; case dtype_float32: case dtype_float64: // doubleStatistics = 3 // message DoubleStatistics { // optional double minimum = 1; // optional double maximum = 2; // optional double sum = 3; // } if (s->chunk.has_minmax || s->chunk.has_sum) { *cur = 3 * 8 + ProtofType::FIXEDLEN; cur += 2; if (s->chunk.has_minmax) { cur = pb_put_fixed64(cur, 1, &s->chunk.min_value.fp_val); cur = pb_put_fixed64(cur, 2, &s->chunk.max_value.fp_val); } if (s->chunk.has_sum) { cur = 
pb_put_fixed64(cur, 3, &s->chunk.sum.fp_val); } fld_start[1] = cur - (fld_start + 2); } break; case dtype_string: // stringStatistics = 4 // message StringStatistics { // optional string minimum = 1; // optional string maximum = 2; // optional sint64 sum = 3; // sum will store the total length of all strings // } if (s->chunk.has_minmax || s->chunk.has_sum) { uint32_t sz = 0; if (s->chunk.has_minmax) { sz += (pb_put_uint(cur, 1, s->chunk.min_value.str_val.length) - cur) + (pb_put_uint(cur, 2, s->chunk.max_value.str_val.length) - cur) + s->chunk.min_value.str_val.length + s->chunk.max_value.str_val.length; } if (s->chunk.has_sum) { sz += pb_put_int(cur, 3, s->chunk.sum.i_val) - cur; } cur[0] = 4 * 8 + ProtofType::FIXEDLEN; cur = pb_encode_uint(cur + 1, sz); if (s->chunk.has_minmax) { cur = pb_put_binary( cur, 1, s->chunk.min_value.str_val.ptr, s->chunk.min_value.str_val.length); cur = pb_put_binary( cur, 2, s->chunk.max_value.str_val.ptr, s->chunk.max_value.str_val.length); } if (s->chunk.has_sum) { cur = pb_put_int(cur, 3, s->chunk.sum.i_val); } } break; case dtype_bool: // bucketStatistics = 5 // message BucketStatistics { // repeated uint64 count = 1 [packed=true]; // } if (s->chunk.has_sum) { cur[0] = 5 * 8 + ProtofType::FIXEDLEN; // count is equal to the number of 'true' values, despite what specs say cur = pb_put_packed_uint(cur + 2, 1, s->chunk.sum.u_val); fld_start[1] = cur - (fld_start + 2); } break; case dtype_decimal64: case dtype_decimal128: // decimalStatistics = 6 // message DecimalStatistics { // optional string minimum = 1; // optional string maximum = 2; // optional string sum = 3; // } if (s->chunk.has_minmax or s->chunk.has_sum) { auto const scale = s->group.col_dtype.scale(); uint32_t sz = 0; auto const min_size = s->chunk.has_minmax ? fixed_point_string_size(s->chunk.min_value.d128_val, scale) : 0; auto const max_size = s->chunk.has_minmax ? fixed_point_string_size(s->chunk.max_value.d128_val, scale) : 0; if (s->chunk.has_minmax) { // encoded string lengths, plus the strings sz += (pb_put_uint(cur, 1, min_size) - cur) + min_size + (pb_put_uint(cur, 1, max_size) - cur) + max_size; } auto const sum_size = s->chunk.has_sum ? 
fixed_point_string_size(s->chunk.sum.d128_val, scale) : 0; if (s->chunk.has_sum) { sz += (pb_put_uint(cur, 1, sum_size) - cur) + sum_size; } cur[0] = 6 * 8 + ProtofType::FIXEDLEN; cur = pb_encode_uint(cur + 1, sz); if (s->chunk.has_minmax) { cur = pb_put_decimal(cur, 1, s->chunk.min_value.d128_val, scale, min_size); // minimum cur = pb_put_decimal(cur, 2, s->chunk.max_value.d128_val, scale, max_size); // maximum } if (s->chunk.has_sum) { cur = pb_put_decimal(cur, 3, s->chunk.sum.d128_val, scale, sum_size); // sum } } break; case dtype_date32: // dateStatistics = 7 // message DateStatistics { // min,max values saved as days since epoch // optional sint32 minimum = 1; // optional sint32 maximum = 2; // } if (s->chunk.has_minmax) { cur[0] = 7 * 8 + ProtofType::FIXEDLEN; cur += 2; cur = pb_put_int(cur, 1, s->chunk.min_value.i_val); cur = pb_put_int(cur, 2, s->chunk.max_value.i_val); fld_start[1] = cur - (fld_start + 2); } break; case dtype_timestamp64: // timestampStatistics = 9 // message TimestampStatistics { // optional sint64 minimum = 1; // min,max values saved as milliseconds since epoch // optional sint64 maximum = 2; // optional sint64 minimumUtc = 3; // min,max values saved as milliseconds since UNIX epoch // optional sint64 maximumUtc = 4; // optional int32 minimumNanos = 5; // lower 6 TS digits for min/max to achieve nanosecond // precision // optional int32 maximumNanos = 6; // } if (s->chunk.has_minmax) { cur[0] = 9 * 8 + ProtofType::FIXEDLEN; cur += 2; auto const [min_ms, min_ns_remainder] = split_nanosecond_timestamp(s->chunk.min_value.i_val); auto const [max_ms, max_ns_remainder] = split_nanosecond_timestamp(s->chunk.max_value.i_val); // minimum/maximum are the same as minimumUtc/maximumUtc as we always write files in UTC cur = pb_put_int(cur, 1, min_ms); // minimum cur = pb_put_int(cur, 2, max_ms); // maximum cur = pb_put_int(cur, 3, min_ms); // minimumUtc cur = pb_put_int(cur, 4, max_ms); // maximumUtc if constexpr (enable_nanosecond_statistics) { if (min_ns_remainder != DEFAULT_MIN_NANOS) { // using uint because positive values are not zigzag encoded cur = pb_put_uint(cur, 5, min_ns_remainder + 1); // minimumNanos } if (max_ns_remainder != DEFAULT_MAX_NANOS) { // using uint because positive values are not zigzag encoded cur = pb_put_uint(cur, 6, max_ns_remainder + 1); // maximumNanos } } fld_start[1] = cur - (fld_start + 2); } break; default: break; } groups[idx].num_chunks = static_cast<uint32_t>(cur - s->base); } } void orc_init_statistics_groups(statistics_group* groups, stats_column_desc const* cols, device_2dspan<rowgroup_rows const> rowgroup_bounds, rmm::cuda_stream_view stream) { dim3 dim_grid((rowgroup_bounds.size().first + init_groups_per_block - 1) / init_groups_per_block, rowgroup_bounds.size().second); dim3 dim_block(init_threads_per_group, init_groups_per_block); gpu_init_statistics_groups<<<dim_grid, dim_block, 0, stream.value()>>>( groups, cols, rowgroup_bounds); } /** * @brief Launches kernels to return statistics buffer offsets and sizes * * @param[in,out] groups Statistics merge groups * @param[in] chunks Statistics chunks * @param[in] statistics_count Number of statistics buffers to encode * @param[in] stream CUDA stream used for device memory operations and kernel launches */ void orc_init_statistics_buffersize(statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count, rmm::cuda_stream_view stream) { gpu_init_statistics_buffersize<block_size> <<<1, block_size, 0, stream.value()>>>(groups, chunks, statistics_count); } /** * 
@brief Launches kernel to encode statistics in ORC protobuf format * * @param[out] blob_bfr Output buffer for statistics blobs * @param[in,out] groups Statistics merge groups * @param[in,out] chunks Statistics data * @param[in] statistics_count Number of statistics buffers */ void orc_encode_statistics(uint8_t* blob_bfr, statistics_merge_group* groups, statistics_chunk const* chunks, uint32_t statistics_count, rmm::cuda_stream_view stream) { unsigned int num_blocks = (statistics_count + encode_chunks_per_block - 1) / encode_chunks_per_block; dim3 dim_block(encode_threads_per_chunk, encode_chunks_per_block); gpu_encode_statistics<<<num_blocks, dim_block, 0, stream.value()>>>( blob_bfr, groups, chunks, statistics_count); } } // namespace cudf::io::orc::gpu
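// --- Illustrative sketch (not part of stats_enc.cu) --------------------------
// What split_nanosecond_timestamp above computes, checked on the host with
// std::chrono. floor() rounds toward negative infinity, so pre-epoch values
// get a negative millisecond part and a remainder that stays in [0, 999'999],
// matching DEFAULT_MIN_NANOS/DEFAULT_MAX_NANOS. Host-only toy; assumes
// <cassert>, <chrono>, <cstdint> and <utility> are available.
inline std::pair<int64_t, int32_t> host_split_nanosecond_timestamp(int64_t nano_count)
{
  auto const ns       = std::chrono::nanoseconds(nano_count);
  auto const ms_floor = std::chrono::floor<std::chrono::milliseconds>(ns);
  return {ms_floor.count(), static_cast<int32_t>((ns - ms_floor).count())};
}

inline void check_split_nanosecond_timestamp()
{
  // 1'500'000'123 ns = 1500 ms + 123 ns remainder
  assert(host_split_nanosecond_timestamp(1'500'000'123) ==
         std::make_pair(int64_t{1500}, int32_t{123}));
  // 1 ns before the epoch: -1 ms with a 999'999 ns remainder
  assert(host_split_nanosecond_timestamp(-1) == std::make_pair(int64_t{-1}, int32_t{999'999}));
}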
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/temp_storage_wrapper.cuh
/*
 * Copyright (c) 2021-2022, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file temp_storage_wrapper.cuh
 * @brief Temporary storage for cub calls and helper wrapper class
 */

#pragma once

#include "byte_array_view.cuh"
#include "statistics.cuh"

#include <cudf/fixed_point/fixed_point.hpp>
#include <cudf/strings/string_view.cuh>
#include <cudf/wrappers/durations.hpp>
#include <cudf/wrappers/timestamps.hpp>

#include <cub/cub.cuh>

namespace cudf {
namespace io {
namespace detail {

template <typename T, int block_size>
using cub_temp_storage = typename cub::BlockReduce<T, block_size>::TempStorage;

using statistics::byte_array_view;

#define MEMBER_NAME(TYPE) TYPE##_stats
#define DECLARE_MEMBER(TYPE) cub_temp_storage<TYPE, block_size> MEMBER_NAME(TYPE);

/**
 * @brief Templated union to hold temporary storage to be used by cub reduce
 * calls
 *
 * @tparam block_size Dimension of the block
 */
template <int block_size>
union block_reduce_storage {
  DECLARE_MEMBER(bool)
  DECLARE_MEMBER(int8_t)
  DECLARE_MEMBER(int16_t)
  DECLARE_MEMBER(int32_t)
  DECLARE_MEMBER(int64_t)
  DECLARE_MEMBER(__int128_t)
  DECLARE_MEMBER(uint8_t)
  DECLARE_MEMBER(uint16_t)
  DECLARE_MEMBER(uint32_t)
  DECLARE_MEMBER(uint64_t)
  DECLARE_MEMBER(float)
  DECLARE_MEMBER(double)
  DECLARE_MEMBER(string_view)
  DECLARE_MEMBER(byte_array_view)
};

#define STORAGE_WRAPPER_GET(TYPE)                                                                 \
  template <typename T>                                                                           \
  __device__ std::enable_if_t<std::is_same_v<T, TYPE>, cub_temp_storage<TYPE, block_size>&> get() \
  {                                                                                               \
    return storage.MEMBER_NAME(TYPE);                                                             \
  }

/**
 * @brief Templated wrapper for block_reduce_storage to return member reference based on requested
 * type
 *
 * @tparam block_size Dimension of the block
 */
template <int block_size>
struct storage_wrapper {
  block_reduce_storage<block_size>& storage;
  __device__ storage_wrapper(block_reduce_storage<block_size>& _temp_storage)
    : storage(_temp_storage)
  {
  }

  STORAGE_WRAPPER_GET(bool);
  STORAGE_WRAPPER_GET(int8_t);
  STORAGE_WRAPPER_GET(int16_t);
  STORAGE_WRAPPER_GET(int32_t);
  STORAGE_WRAPPER_GET(int64_t);
  STORAGE_WRAPPER_GET(__int128_t);
  STORAGE_WRAPPER_GET(uint8_t);
  STORAGE_WRAPPER_GET(uint16_t);
  STORAGE_WRAPPER_GET(uint32_t);
  STORAGE_WRAPPER_GET(uint64_t);
  STORAGE_WRAPPER_GET(float);
  STORAGE_WRAPPER_GET(double);
  STORAGE_WRAPPER_GET(string_view);
  STORAGE_WRAPPER_GET(byte_array_view);
};

#undef DECLARE_MEMBER
#undef MEMBER_NAME

}  // namespace detail
}  // namespace io
}  // namespace cudf
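// --- Illustrative sketch (not part of temp_storage_wrapper.cuh) --------------
// How a kernel can reuse the single shared block_reduce_storage union for
// reductions over different types: get<T>() hands back the matching cub
// TempStorage member, so one shared-memory allocation serves every reduction
// in the block. Hypothetical kernel for exposition; assumes it is launched
// with exactly block_size threads.
template <int block_size>
__global__ void example_two_reductions(float const* in_f,
                                       int32_t const* in_i,
                                       float* out_f,
                                       int32_t* out_i)
{
  __shared__ cudf::io::detail::block_reduce_storage<block_size> shared_storage;
  cudf::io::detail::storage_wrapper<block_size> wrapper(shared_storage);

  // First reduction: float sum through the float member of the union
  float const f_sum =
    cub::BlockReduce<float, block_size>(wrapper.template get<float>()).Sum(in_f[threadIdx.x]);
  __syncthreads();  // required before the same shared bytes serve another type

  // Second reduction: int32 max through the int32_t member of the same union
  int32_t const i_max = cub::BlockReduce<int32_t, block_size>(wrapper.template get<int32_t>())
                          .Reduce(in_i[threadIdx.x], cub::Max());

  if (threadIdx.x == 0) {
    *out_f = f_sum;
    *out_i = i_max;
  }
}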
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/statistics.cuh
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file statistics.cuh * @brief Common structures and utility functions for statistics */ #pragma once #include "byte_array_view.cuh" #include <cudf/column/column_device_view.cuh> #include <cudf/lists/lists_column_view.hpp> #include <cudf/strings/string_view.hpp> #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <cstdint> namespace cudf { namespace io { enum statistics_dtype { dtype_none, dtype_bool, dtype_int8, dtype_int16, dtype_int32, dtype_date32, dtype_int64, dtype_timestamp64, dtype_decimal64, dtype_decimal128, dtype_float32, dtype_float64, dtype_string, dtype_byte_array, }; struct stats_column_desc { statistics_dtype stats_dtype; //!< physical data type of column uint32_t num_rows; //!< number of rows in column uint32_t num_values; //!< Number of data values in column. Different from num_rows in case of //!< nested columns int32_t ts_scale; //!< timestamp scale (>0: multiply by scale, <0: divide by -scale) column_device_view const* leaf_column; //!< Pointer to leaf column column_device_view const* parent_column; //!< Pointer to parent column; nullptr if not list type }; template <typename ReturnType, typename InternalType> struct t_array_stats { InternalType const* ptr; //!< ptr to data size_type length; //!< length of data __host__ __device__ __forceinline__ volatile t_array_stats& operator=( ReturnType const& val) volatile { ptr = val.data(); length = val.size_bytes(); return *this; } __host__ __device__ __forceinline__ operator ReturnType() volatile { return ReturnType(ptr, length); } __host__ __device__ __forceinline__ operator ReturnType() const { return ReturnType(ptr, length); } __host__ __device__ __forceinline__ operator ReturnType() { return ReturnType(ptr, length); } }; using string_stats = t_array_stats<string_view, char>; using byte_array_view = statistics::byte_array_view; using byte_array_stats = t_array_stats<byte_array_view, byte_array_view::element_type>; union statistics_val { string_stats str_val; //!< string columns byte_array_stats byte_val; //!< byte array columns double fp_val; //!< float columns int64_t i_val; //!< integer columns uint64_t u_val; //!< unsigned integer columns __int128_t d128_val; //!< decimal128 columns }; struct statistics_chunk { uint32_t non_nulls{}; //!< number of non-null values in chunk uint32_t null_count{}; //!< number of null values in chunk statistics_val min_value{}; //!< minimum value in chunk statistics_val max_value{}; //!< maximum value in chunk statistics_val sum{}; //!< sum of chunk uint8_t has_minmax{}; //!< Nonzero if min_value and max_values are valid uint8_t has_sum{}; //!< Nonzero if sum is valid }; struct statistics_group { stats_column_desc const* col{}; //!< Column information uint32_t start_row{}; //!< Start row of this group uint32_t num_rows{}; //!< Number of rows in group uint32_t non_leaf_nulls{}; //!< Number of null non-leaf values in the group }; struct statistics_merge_group { data_type col_dtype; 
//!< Column data type statistics_dtype stats_dtype{dtype_none}; //!< Statistics data type for this column uint32_t start_chunk{}; //!< Start chunk of this group uint32_t num_chunks{}; //!< Number of chunks in group }; template <typename T, std::enable_if_t<!std::is_same_v<T, statistics::byte_array_view>>* = nullptr> __device__ T get_element(column_device_view const& col, uint32_t row) { return col.element<T>(row); } template <typename T, std::enable_if_t<std::is_same_v<T, statistics::byte_array_view>>* = nullptr> __device__ T get_element(column_device_view const& col, uint32_t row) { using et = typename T::element_type; size_type const index = row + col.offset(); // account for this view's _offset auto const* d_offsets = col.child(lists_column_view::offsets_column_index).data<size_type>(); auto const* d_data = col.child(lists_column_view::child_column_index).data<et>(); auto const offset = d_offsets[index]; return T(d_data + offset, d_offsets[index + 1] - offset); } } // namespace io } // namespace cudf
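// --- Illustrative sketch (not part of statistics.cuh) ------------------------
// How a consumer is expected to read statistics_chunk: the statistics_val
// union is only meaningful for the member selected by the column's
// statistics_dtype, and min_value/max_value are only valid when has_minmax is
// nonzero. Hypothetical helper for exposition.
inline int64_t min_int_or_default(cudf::io::statistics_chunk const& chunk,
                                  cudf::io::statistics_dtype dtype,
                                  int64_t default_value)
{
  bool const is_integer = dtype == cudf::io::dtype_int8 || dtype == cudf::io::dtype_int16 ||
                          dtype == cudf::io::dtype_int32 || dtype == cudf::io::dtype_int64;
  // i_val is the active union member for integer columns; has_minmax guards it
  return (is_integer && chunk.has_minmax) ? chunk.min_value.i_val : default_value;
}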
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/parquet_column_statistics.cu
/*
 * Copyright (c) 2021-2023, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file parquet_column_statistics.cu
 * @brief Template specialization for PARQUET statistics calls
 */

#include "column_statistics.cuh"

namespace cudf {
namespace io {
namespace detail {

template <>
void merge_group_statistics<detail::io_file_format::PARQUET>(statistics_chunk* chunks_out,
                                                             statistics_chunk const* chunks_in,
                                                             statistics_merge_group const* groups,
                                                             uint32_t num_chunks,
                                                             rmm::cuda_stream_view stream);

template <>
void calculate_group_statistics<detail::io_file_format::PARQUET>(statistics_chunk* chunks,
                                                                 statistics_group const* groups,
                                                                 uint32_t num_chunks,
                                                                 rmm::cuda_stream_view stream,
                                                                 bool int96_timestamp);

}  // namespace detail
}  // namespace io
}  // namespace cudf
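// --- Illustrative sketch (not part of parquet_column_statistics.cu) ----------
// How the PARQUET entry points declared above are invoked. The chunk and group
// arrays live in device memory and are prepared by the writer; everything here
// is a hypothetical caller for exposition.
void example_parquet_stats(cudf::io::statistics_chunk* chunks,
                           cudf::io::statistics_group const* groups,
                           uint32_t num_chunks,
                           rmm::cuda_stream_view stream)
{
  using cudf::io::detail::io_file_format;
  // true selects the INT96 path; per https://github.com/rapidsai/cudf/issues/10438
  // that path currently skips timestamp statistics
  cudf::io::detail::calculate_group_statistics<io_file_format::PARQUET>(
    chunks, groups, num_chunks, stream, /*int96_timestamp=*/true);
}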
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/statistics_type_identification.cuh
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file statistics_type_identification.cuh * @brief Utility classes to identify extrema, aggregate and conversion types for ORC and PARQUET */ #pragma once #include "byte_array_view.cuh" #include <cudf/fixed_point/fixed_point.hpp> #include <cudf/wrappers/timestamps.hpp> #include <cudf/strings/string_view.cuh> #include <cudf/wrappers/durations.hpp> #include <cudf/utilities/traits.hpp> #include "conversion_type_select.cuh" #include <tuple> namespace cudf { namespace io { namespace detail { using cudf::io::statistics::byte_array_view; enum class io_file_format { ORC, PARQUET }; enum class is_int96_timestamp { YES, NO }; template <io_file_format IO, is_int96_timestamp INT96> struct conversion_map; // Every timestamp or duration type is converted to nanoseconds in ORC statistics template <is_int96_timestamp INT96> struct conversion_map<io_file_format::ORC, INT96> { using types = std::tuple<std::pair<cudf::timestamp_s, cudf::timestamp_ns>, std::pair<cudf::timestamp_us, cudf::timestamp_ns>, std::pair<cudf::timestamp_ns, cudf::timestamp_ns>, std::pair<cudf::duration_s, cudf::duration_ns>, std::pair<cudf::duration_us, cudf::duration_ns>, std::pair<cudf::duration_ns, cudf::duration_ns>>; }; // In Parquet timestamps and durations with second resolution are converted to // milliseconds. Timestamps and durations with nanosecond resolution are // converted to microseconds. 
template <> struct conversion_map<io_file_format::PARQUET, is_int96_timestamp::YES> { using types = std::tuple<std::pair<cudf::timestamp_s, cudf::timestamp_ms>, std::pair<cudf::timestamp_ns, cudf::timestamp_us>, std::pair<cudf::duration_s, cudf::duration_ms>, std::pair<cudf::duration_ns, cudf::duration_us>>; }; // int64 nanosecond timestamp won't be converted template <> struct conversion_map<io_file_format::PARQUET, is_int96_timestamp::NO> { using types = std::tuple<std::pair<cudf::timestamp_s, cudf::timestamp_ms>, std::pair<cudf::duration_s, cudf::duration_ms>>; }; /** * @brief Utility class to help conversion of timestamps and durations to their * representation type * * @tparam conversion A conversion_map structure */ template <typename conversion> class type_conversion { using type_selector = ConversionTypeSelect<typename conversion::types>; public: template <typename T> using type = typename type_selector::template type<T>; template <typename T> static constexpr __device__ typename type_selector::template type<T> convert(T const& elem) { using Type = typename type_selector::template type<T>; if constexpr (cudf::is_duration<T>()) { return cuda::std::chrono::duration_cast<Type>(elem); } else if constexpr (cudf::is_timestamp<T>()) { using Duration = typename Type::duration; return cuda::std::chrono::time_point_cast<Duration>(elem); } else { return elem; } return Type{}; } }; template <class T> struct dependent_false : std::false_type {}; /** * @brief Utility class to convert a leaf column element into its extrema type * * @tparam T Column type */ template <typename T> class extrema_type { private: using integral_extrema_type = typename std::conditional_t<std::is_signed_v<T>, int64_t, uint64_t>; using arithmetic_extrema_type = typename std::conditional_t<std::is_integral_v<T>, integral_extrema_type, double>; using non_arithmetic_extrema_type = typename std::conditional_t< cudf::is_fixed_point<T>() or cudf::is_duration<T>() or cudf::is_timestamp<T>(), typename std::conditional_t<cudf::is_fixed_point<T>(), __int128_t, int64_t>, typename std::conditional_t< std::is_same_v<T, string_view>, string_view, std::conditional_t<std::is_same_v<T, byte_array_view>, byte_array_view, void>>>; // unsigned int/bool -> uint64_t // signed int -> int64_t // float/double -> double // decimal32/64/128 -> __int128_t // duration_[T] -> int64_t // string_view -> string_view // byte_array_view -> byte_array_view // timestamp_[T] -> int64_t public: // Does type T have an extrema? 
static constexpr bool is_supported = std::is_arithmetic_v<T> or std::is_same_v<T, string_view> or cudf::is_duration<T>() or cudf::is_timestamp<T>() or cudf::is_fixed_point<T>() or std::is_same_v<T, byte_array_view>; using type = typename std:: conditional_t<std::is_arithmetic_v<T>, arithmetic_extrema_type, non_arithmetic_extrema_type>; /** * @brief Function that converts an element of a leaf column into its extrema type */ __device__ static type convert(T const& val) { if constexpr (std::is_arithmetic_v<T> or std::is_same_v<T, string_view> or std::is_same_v<T, byte_array_view>) { return val; } else if constexpr (cudf::is_fixed_point<T>()) { return val.value(); } else if constexpr (cudf::is_duration<T>()) { return val.count(); } else if constexpr (cudf::is_timestamp<T>()) { return val.time_since_epoch().count(); } else { static_assert(dependent_false<T>::value, "aggregation_type does not exist"); } return type{}; } }; /** * @brief Utility class to convert a leaf column element into its aggregate type * * @tparam T Column type */ template <typename T> class aggregation_type { private: using integral_aggregation_type = typename std::conditional_t<std::is_signed_v<T>, int64_t, uint64_t>; using arithmetic_aggregation_type = typename std::conditional_t<std::is_integral_v<T>, integral_aggregation_type, double>; using non_arithmetic_aggregation_type = typename std::conditional_t< cudf::is_fixed_point<T>() or cudf::is_duration<T>() or cudf::is_timestamp<T>() // To be disabled with static_assert or std::is_same_v<T, string_view> or std::is_same_v<T, byte_array_view>, typename std::conditional_t<std::is_same_v<T, numeric::decimal128>, __int128_t, int64_t>, void>; // unsigned int/bool -> uint64_t // signed int -> int64_t // float/double -> double // decimal32/64 -> int64_t // decimal128 -> __int128_t // duration_[T] -> int64_t // string_view -> int64_t // byte_array -> int64_t // NOTE : timestamps do not have an aggregation type public: // Does type T aggregate? static constexpr bool is_supported = std::is_arithmetic_v<T> or std::is_same_v<T, string_view> or cudf::is_duration<T>() or cudf::is_fixed_point<T>() or std::is_same_v<T, byte_array_view>; using type = typename std::conditional_t<std::is_arithmetic_v<T>, arithmetic_aggregation_type, non_arithmetic_aggregation_type>; /** * @brief Function that converts an element of a leaf column into its aggregate type */ __device__ static type convert(T const& val) { if constexpr (std::is_same_v<T, string_view> or std::is_same_v<T, byte_array_view>) { return val.size_bytes(); } else if constexpr (std::is_integral_v<T>) { return val; } else if constexpr (std::is_floating_point_v<T>) { return isnan(val) ? 
0 : val; } else if constexpr (cudf::is_fixed_point<T>()) { return val.value(); } else if constexpr (cudf::is_duration<T>()) { return val.count(); } else if constexpr (cudf::is_timestamp<T>()) { static_assert(dependent_false<T>::value, "aggregation_type for timestamps do not exist"); } else { static_assert(dependent_false<T>::value, "aggregation_type for supplied type do not exist"); } return type{}; } }; template <typename T> __inline__ __device__ constexpr T minimum_identity() { if constexpr (std::is_same_v<T, string_view>) { return string_view::max(); } else if constexpr (std::is_same_v<T, byte_array_view>) { return byte_array_view::max(); } return cuda::std::numeric_limits<T>::max(); } template <typename T> __inline__ __device__ constexpr T maximum_identity() { if constexpr (std::is_same_v<T, string_view>) { return string_view::min(); } else if constexpr (std::is_same_v<T, byte_array_view>) { return byte_array_view::min(); } return cuda::std::numeric_limits<T>::lowest(); } /** * @brief Utility class to identify whether a type T is aggregated or ignored * for ORC or PARQUET * * @tparam T Leaf column type * @tparam IO File format for which statistics calculation is being done */ template <typename T, io_file_format IO> class statistics_type_category { public: // Types that calculate the sum of elements encountered static constexpr bool include_aggregate = (IO == io_file_format::PARQUET) ? false : aggregation_type<T>::is_supported; // Types for which sum does not make sense, but extrema do static constexpr bool include_extrema = aggregation_type<T>::is_supported or cudf::is_timestamp<T>() or (std::is_same_v<T, cudf::list_view> and IO == io_file_format::PARQUET); // Types for which only value count makes sense (e.g. nested) static constexpr bool include_count = (IO == io_file_format::ORC) ? true : include_extrema; // Do not calculate statistics for any other type static constexpr bool ignore = not(include_count); }; } // namespace detail } // namespace io } // namespace cudf
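// --- Illustrative sketch (not part of statistics_type_identification.cuh) ----
// Compile-time checks documenting what the traits above resolve to; they
// generate no code. Shown for exposition, assuming <type_traits> is reachable
// through the includes above.
namespace example_checks {
using cudf::io::detail::aggregation_type;
using cudf::io::detail::extrema_type;

static_assert(std::is_same_v<extrema_type<int16_t>::type, int64_t>,
              "signed integers widen to int64_t for extrema");
static_assert(std::is_same_v<extrema_type<uint8_t>::type, uint64_t>,
              "unsigned integers widen to uint64_t for extrema");
static_assert(std::is_same_v<extrema_type<float>::type, double>,
              "floating-point extrema are tracked in double");
static_assert(std::is_same_v<aggregation_type<cudf::string_view>::type, int64_t>,
              "string aggregation sums byte lengths into int64_t");
static_assert(not aggregation_type<cudf::timestamp_ms>::is_supported,
              "timestamps have no aggregation (sum) type");
}  // namespace example_checks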
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/column_statistics.cuh
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file column_statistics.cuh * @brief Functors for statistics calculation to be used in ORC and PARQUET */ #pragma once #include "temp_storage_wrapper.cuh" #include "typed_statistics_chunk.cuh" #include "statistics.cuh" namespace cudf { namespace io { /** * @brief shared state for statistics calculation kernel */ struct stats_state_s { stats_column_desc col{}; ///< Column information statistics_group group{}; ///< Group description statistics_chunk ck{}; ///< Output statistics chunk }; /** * @brief shared state for statistics merge kernel */ struct merge_state_s { stats_column_desc col{}; ///< Column information statistics_merge_group group{}; ///< Group description statistics_chunk ck{}; ///< Resulting statistics chunk }; template <int dimension> using block_reduce_storage = detail::block_reduce_storage<dimension>; /** * @brief Functor to calculate the statistics of rows in a column belonging to a * statistics group * * @tparam block_size Dimension of the block * @tparam IO File format for which statistics calculation is being done */ template <int block_size, detail::io_file_format IO, detail::is_int96_timestamp INT96 = detail::is_int96_timestamp::YES> struct calculate_group_statistics_functor { block_reduce_storage<block_size>& temp_storage; /** * @brief Construct a statistics calculator * * @param d_temp_storage Temporary storage to be used by cub calls */ __device__ calculate_group_statistics_functor(block_reduce_storage<block_size>& d_temp_storage) : temp_storage(d_temp_storage) { } template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::ignore>* = nullptr> __device__ void operator()(stats_state_s&, uint32_t) { // No-op for unsupported aggregation types } template <typename T> __device__ T get_element(stats_state_s const& s, uint32_t row) { return cudf::io::get_element<T>(*s.col.leaf_column, row); } /** * @brief Iterates through the rows specified by statistics group and stores the combined * statistics into the statistics chunk. 
* * @param s Statistics state which specifies the column, the group being worked and the chunk * the results will be stored into * @param t thread id */ template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_extrema and (IO != detail::io_file_format::PARQUET or !std::is_same_v<T, list_view>)>* = nullptr> __device__ void operator()(stats_state_s& s, uint32_t t) { // Temporarily disable stats writing for int96 timestamps // TODO: https://github.com/rapidsai/cudf/issues/10438 if constexpr (cudf::is_timestamp<T>() and IO == detail::io_file_format::PARQUET and INT96 == detail::is_int96_timestamp::YES) { return; } detail::storage_wrapper<block_size> storage(temp_storage); using type_convert = detail::type_conversion<detail::conversion_map<IO, INT96>>; using CT = typename type_convert::template type<T>; typed_statistics_chunk<CT, detail::statistics_type_category<T, IO>::include_aggregate> chunk; for (uint32_t i = 0; i < s.group.num_rows; i += block_size) { uint32_t r = i + t; uint32_t row = r + s.group.start_row; if (r < s.group.num_rows) { if (s.col.leaf_column->is_valid(row)) { auto converted_value = type_convert::convert(get_element<T>(s, row)); chunk.reduce(converted_value); } else { chunk.null_count++; } } } chunk = block_reduce(chunk, storage); if (t == 0) { // parquet wants total null count in stats, not just count of null leaf values if constexpr (IO == detail::io_file_format::PARQUET) { chunk.null_count += s.group.non_leaf_nulls; } s.ck = get_untyped_chunk(chunk); } } template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_extrema and IO == detail::io_file_format::PARQUET and std::is_same_v<T, list_view>>* = nullptr> __device__ void operator()(stats_state_s& s, uint32_t t) { operator()<statistics::byte_array_view>(s, t); } template < typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_count and not detail::statistics_type_category<T, IO>::include_extrema>* = nullptr> __device__ void operator()(stats_state_s& s, uint32_t t) { detail::storage_wrapper<block_size> storage(temp_storage); typed_statistics_chunk<uint32_t, false> chunk; for (uint32_t i = 0; i < s.group.num_rows; i += block_size) { uint32_t r = i + t; uint32_t row = r + s.group.start_row; if (r < s.group.num_rows) { if (s.col.leaf_column->is_valid(row)) { chunk.non_nulls++; } else { chunk.null_count++; } } } cub::BlockReduce<uint32_t, block_size>(storage.template get<uint32_t>()).Sum(chunk.non_nulls); if (t == 0) { s.ck = get_untyped_chunk(chunk); } } }; /** * @brief Functor to merge the statistics chunks of a column belonging to a * merge group * * @tparam block_size Dimension of the block * @tparam IO File format for which statistics calculation is being done */ template <int block_size, detail::io_file_format IO> struct merge_group_statistics_functor { block_reduce_storage<block_size>& temp_storage; __device__ merge_group_statistics_functor(block_reduce_storage<block_size>& d_temp_storage) : temp_storage(d_temp_storage) { } template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::ignore>* = nullptr> __device__ void operator()(merge_state_s& s, statistics_chunk const* chunks, uint32_t const num_chunks, uint32_t t) { // No-op for unsupported aggregation types } template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_extrema and (IO == detail::io_file_format::ORC or !std::is_same_v<T, list_view>)>* = nullptr> __device__ void operator()(merge_state_s& s, statistics_chunk const* chunks, 
uint32_t const num_chunks, uint32_t t) { detail::storage_wrapper<block_size> storage(temp_storage); typed_statistics_chunk<T, detail::statistics_type_category<T, IO>::include_aggregate> chunk; for (uint32_t i = t; i < num_chunks; i += block_size) { chunk.reduce(chunks[i]); } chunk.has_minmax = (chunk.minimum_value <= chunk.maximum_value); chunk = block_reduce(chunk, storage); if (t == 0) { s.ck = get_untyped_chunk(chunk); } } template <typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_extrema and IO == detail::io_file_format::PARQUET and std::is_same_v<T, list_view>>* = nullptr> __device__ void operator()(merge_state_s& s, statistics_chunk const* chunks, uint32_t const num_chunks, uint32_t t) { operator()<statistics::byte_array_view>(s, chunks, num_chunks, t); } template < typename T, std::enable_if_t<detail::statistics_type_category<T, IO>::include_count and not detail::statistics_type_category<T, IO>::include_extrema>* = nullptr> __device__ void operator()(merge_state_s& s, statistics_chunk const* chunks, uint32_t const num_chunks, uint32_t t) { detail::storage_wrapper<block_size> storage(temp_storage); typed_statistics_chunk<uint32_t, false> chunk; for (uint32_t i = t; i < num_chunks; i += block_size) { chunk.reduce(chunks[i]); } chunk = block_reduce(chunk, storage); if (t == 0) { s.ck = get_untyped_chunk(chunk); } } }; /** * @brief Function to cooperatively load an object from a pointer * * If the pointer is nullptr then the members of the object are set to 0 * * @param[out] destination Object being loaded * @param[in] source Source object * @tparam T Type of object */ template <typename T> __device__ void cooperative_load(T& destination, T const* source = nullptr) { using load_type = std::conditional_t<((sizeof(T) % sizeof(uint32_t)) == 0), uint32_t, uint8_t>; if (source == nullptr) { for (auto i = threadIdx.x; i < (sizeof(T) / sizeof(load_type)); i += blockDim.x) { reinterpret_cast<load_type*>(&destination)[i] = load_type{0}; } } else { for (auto i = threadIdx.x; i < sizeof(T) / sizeof(load_type); i += blockDim.x) { reinterpret_cast<load_type*>(&destination)[i] = reinterpret_cast<load_type const*>(source)[i]; } } } /** * @brief Kernel to calculate group statistics * * @param[out] chunks Statistics results [num_chunks] * @param[in] groups Statistics row groups [num_chunks] * @tparam block_size Dimension of the block * @tparam IO File format for which statistics calculation is being done */ template <int block_size, detail::io_file_format IO> __global__ void __launch_bounds__(block_size, 1) gpu_calculate_group_statistics(statistics_chunk* chunks, statistics_group const* groups, bool const int96_timestamps) { __shared__ __align__(8) stats_state_s state; __shared__ block_reduce_storage<block_size> storage; // Load state members cooperative_load(state.group, &groups[blockIdx.x]); cooperative_load(state.ck); __syncthreads(); cooperative_load(state.col, state.group.col); __syncthreads(); // Calculate statistics if constexpr (IO == detail::io_file_format::PARQUET) { // Do not convert ns to us for int64 timestamps if (not int96_timestamps) { type_dispatcher( state.col.leaf_column->type(), calculate_group_statistics_functor<block_size, IO, detail::is_int96_timestamp::NO>(storage), state, threadIdx.x); } // Temporarily disable stats writing for int96 timestamps // TODO: https://github.com/rapidsai/cudf/issues/10438 else { type_dispatcher( state.col.leaf_column->type(), calculate_group_statistics_functor<block_size, IO, detail::is_int96_timestamp::YES>( storage), 
state, threadIdx.x); } } else { type_dispatcher(state.col.leaf_column->type(), calculate_group_statistics_functor<block_size, IO>(storage), state, threadIdx.x); } __syncthreads(); cooperative_load(chunks[blockIdx.x], &state.ck); } namespace detail { /** * @brief Launches kernel to calculate group statistics * * @param[out] chunks Statistics results [num_chunks] * @param[in] groups Statistics row groups [num_chunks] * @param[in] num_chunks Number of chunks & rowgroups * @param[in] stream CUDA stream to use * @tparam IO File format for which statistics calculation is being done */ template <detail::io_file_format IO> void calculate_group_statistics(statistics_chunk* chunks, statistics_group const* groups, uint32_t num_chunks, rmm::cuda_stream_view stream, bool const int96_timestamps = false) { constexpr int block_size = 256; gpu_calculate_group_statistics<block_size, IO> <<<num_chunks, block_size, 0, stream.value()>>>(chunks, groups, int96_timestamps); } /** * @brief Kernel to merge column statistics * * @param[out] chunks_out Statistics results [num_chunks] * @param[in] chunks_in Input statistics * @param[in] groups Statistics groups [num_chunks] * @tparam block_size Dimension of the block * @tparam IO File format for which statistics calculation is being done */ template <int block_size, detail::io_file_format IO> __global__ void __launch_bounds__(block_size, 1) gpu_merge_group_statistics(statistics_chunk* chunks_out, statistics_chunk const* chunks_in, statistics_merge_group const* groups) { __shared__ __align__(8) merge_state_s state; __shared__ block_reduce_storage<block_size> storage; cooperative_load(state.group, &groups[blockIdx.x]); __syncthreads(); type_dispatcher(state.group.col_dtype, merge_group_statistics_functor<block_size, IO>(storage), state, chunks_in + state.group.start_chunk, state.group.num_chunks, threadIdx.x); __syncthreads(); cooperative_load(chunks_out[blockIdx.x], &state.ck); } /** * @brief Launches kernel to merge column statistics * * @param[out] chunks_out Statistics results [num_chunks] * @param[in] chunks_in Input statistics * @param[in] groups Statistics groups [num_chunks] * @param[in] num_chunks Number of chunks & groups * @param[in] stream CUDA stream to use * @tparam IO File format for which statistics calculation is being done */ template <detail::io_file_format IO> void merge_group_statistics(statistics_chunk* chunks_out, statistics_chunk const* chunks_in, statistics_merge_group const* groups, uint32_t num_chunks, rmm::cuda_stream_view stream) { constexpr int block_size = 256; gpu_merge_group_statistics<block_size, IO> <<<num_chunks, block_size, 0, stream.value()>>>(chunks_out, chunks_in, groups); } } // namespace detail } // namespace io } // namespace cudf
0
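The striding-plus-block-reduce pattern in calculate_group_statistics_functor above is the core of the per-group kernel. A minimal standalone sketch of that pattern (illustrative names, not cudf code): each thread walks the row group in steps of block_size, folds a private min/max/null-count, and cub::BlockReduce combines the partials before thread 0 publishes the result.

#include <climits>
#include <cub/cub.cuh>

constexpr int block_size = 256;

__global__ void group_min_max_nulls(int const* values,
                                    bool const* valid,
                                    int num_rows,
                                    int* out_min,
                                    int* out_max,
                                    unsigned int* out_nulls)
{
  using int_reduce   = cub::BlockReduce<int, block_size>;
  using count_reduce = cub::BlockReduce<unsigned int, block_size>;
  __shared__ typename int_reduce::TempStorage int_storage;
  __shared__ typename count_reduce::TempStorage count_storage;

  int const t              = threadIdx.x;
  int local_min            = INT_MAX;  // identity for min
  int local_max            = INT_MIN;  // identity for max
  unsigned int local_nulls = 0;

  // Same striding as the functor's loop over s.group.num_rows
  for (int i = 0; i < num_rows; i += block_size) {
    int const r = i + t;
    if (r < num_rows) {
      if (valid[r]) {
        local_min = min(local_min, values[r]);
        local_max = max(local_max, values[r]);
      } else {
        ++local_nulls;
      }
    }
  }

  int const block_min = int_reduce(int_storage).Reduce(local_min, cub::Min());
  __syncthreads();  // int_storage is reused by the next reduction
  int const block_max = int_reduce(int_storage).Reduce(local_max, cub::Max());
  unsigned int const block_nulls = count_reduce(count_storage).Sum(local_nulls);

  if (t == 0) {
    *out_min   = block_min;
    *out_max   = block_max;
    *out_nulls = block_nulls;
  }
}

Launched as one block per group (e.g. group_min_max_nulls<<<1, block_size>>>(...) for a single group), this mirrors how gpu_calculate_group_statistics assigns one blockIdx.x per statistics chunk.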
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/conversion_type_select.cuh
/* * Copyright (c) 2021, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file conversion_type_select.cuh * @brief Utility classes for timestamp and duration conversion for PARQUET and ORC */ #pragma once #include <tuple> #include <type_traits> #include <utility> namespace cudf { namespace io { namespace detail { template <int, int, typename> class DetectInnerIteration; template <int N0, typename... T> class DetectInnerIteration<N0, 0, std::tuple<T...>> { public: static constexpr bool is_duplicate = std::is_same_v<typename std::tuple_element<N0, std::tuple<T...>>::type, typename std::tuple_element<0, std::tuple<T...>>::type>; }; template <int N0, int N1, typename... T> class DetectInnerIteration<N0, N1, std::tuple<T...>> { public: static constexpr bool is_duplicate = std::is_same_v<typename std::tuple_element<N0, std::tuple<T...>>::type, typename std::tuple_element<N1, std::tuple<T...>>::type> || DetectInnerIteration<N0, N1 - 1, std::tuple<T...>>::is_duplicate; }; template <int, typename> class DetectIteration; template <typename... T> class DetectIteration<0, std::tuple<T...>> { public: static constexpr bool is_duplicate = false; }; template <int N, typename... T> class DetectIteration<N, std::tuple<T...>> { public: static constexpr bool is_duplicate = DetectInnerIteration<N, N - 1, std::tuple<T...>>::is_duplicate || DetectIteration<N - 1, std::tuple<T...>>::is_duplicate; }; template <typename> class Detect; /** * @brief Utility class to detect multiple occurrences of a type in the first element of pairs in a * tuple For eg. with the following tuple : * * using conversion_types = * std::tuple< * std::pair<int, A>, * std::pair<char, B>, * std::pair<int, C>, * std::pair<int, D>, * std::pair<unsigned, E>, * std::pair<unsigned, F>>; * * Detect<conversion_types>::is_duplicate will evaluate to true at compile time. * Here std::pair<int, A>, std::pair<int, C> and std::pair<int, D> are treated as duplicates * and std::pair<unsigned, E> and std::pair<unsigned, F>> are treated as duplicates. * * @tparam T... Parameter pack of pairs of types */ template <typename... 
T> class Detect<std::tuple<T...>> { public: static constexpr bool is_duplicate = DetectIteration<(sizeof...(T) - 1), std::tuple<T...>>::is_duplicate; }; template <typename> class ConversionTypeSelect; template <typename I0> class ConversionTypeSelect<std::tuple<I0>> { public: template <typename T> using type = std::conditional_t<std::is_same_v<T, typename std::tuple_element<0, I0>::type>, typename std::tuple_element<1, I0>::type, T>; }; /** * @brief Utility to select between types based on an input type * * using Conversion = std::tuple< * std::pair<cudf::timestamp_s, cudf::timestamp_ms>, * std::pair<cudf::timestamp_ns, cudf::timestamp_us>, * std::pair<cudf::duration_s, cudf::duration_ms>, * std::pair<cudf::duration_ns, cudf::duration_us>> * * using type = ConversionTypeSelect<Conversion>::type<cudf::duration_ns> * Here type will resolve to cudf::duration_us * If the type passed does not match any entries the type is returned as it is * This utility takes advantage of Detect class to reject any tuple with duplicate first * entries at compile time * * @tparam T... Parameter pack of pairs of types */ template <typename I0, typename... In> class ConversionTypeSelect<std::tuple<I0, In...>> { public: template <typename T> using type = std::conditional_t<std::is_same_v<T, typename std::tuple_element<0, I0>::type>, typename std::tuple_element<1, I0>::type, typename ConversionTypeSelect<std::tuple<In...>>::template type<T>>; static_assert(not Detect<std::tuple<I0, In...>>::is_duplicate, "Type tuple has duplicate first entries"); }; } // namespace detail } // namespace io } // namespace cudf
0
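A compile-time usage sketch of ConversionTypeSelect and Detect, assuming conversion_type_select.cuh is included; example_map and its pairs are illustrative stand-ins, not the real conversion_map. The selector rewrites a type through the first matching pair and leaves unmapped types untouched, while Detect rejects tuples whose pairs share a first type.

#include <cstdint>
#include <tuple>
#include <type_traits>
#include <utility>

using example_map = std::tuple<std::pair<std::int64_t, std::int32_t>,
                               std::pair<double, float>>;

static_assert(
  std::is_same_v<cudf::io::detail::ConversionTypeSelect<example_map>::type<std::int64_t>,
                 std::int32_t>,
  "a mapped type is rewritten through its pair");
static_assert(
  std::is_same_v<cudf::io::detail::ConversionTypeSelect<example_map>::type<char>, char>,
  "an unmapped type passes through unchanged");

// A tuple such as std::tuple<std::pair<int, float>, std::pair<int, double>>
// would instead trip the static_assert on Detect<...>::is_duplicate rather
// than silently picking one of the two mappings.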
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/orc_column_statistics.cu
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file orc_column_statistics.cu * @brief Template specialization for ORC statistics calls */ #include "column_statistics.cuh" namespace cudf { namespace io { namespace detail { template <> void merge_group_statistics<detail::io_file_format::ORC>(statistics_chunk* chunks_out, statistics_chunk const* chunks_in, statistics_merge_group const* groups, uint32_t num_chunks, rmm::cuda_stream_view stream); template <> void calculate_group_statistics<detail::io_file_format::ORC>(statistics_chunk* chunks, statistics_group const* groups, uint32_t num_chunks, rmm::cuda_stream_view stream, bool int96_timestamp); } // namespace detail } // namespace io } // namespace cudf
0
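The file above pins the ORC flavor of the statistics templates to a single translation unit, with a PARQUET twin elsewhere. A generic single-file sketch of the underlying idea, using explicit instantiation (a closely related mechanism) and hypothetical names:

#include <cstdio>

enum class file_format { ORC, PARQUET };

template <file_format F>
void calculate_stats()  // stand-in for the heavyweight template in the .cuh
{
  std::printf("stats pass for format %d\n", static_cast<int>(F));
}

// One line per flavor, each living in its own .cu in the real layout, so
// the expensive instantiation is compiled exactly once per format.
template void calculate_stats<file_format::ORC>();
template void calculate_stats<file_format::PARQUET>();

int main()
{
  calculate_stats<file_format::ORC>();
  return 0;
}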
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/typed_statistics_chunk.cuh
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file typed_statistics_chunk.cuh * @brief Templated wrapper to generalize statistics chunk reduction and aggregation * across different leaf column types */ #pragma once #include "byte_array_view.cuh" #include "statistics.cuh" #include "statistics_type_identification.cuh" #include "temp_storage_wrapper.cuh" #include <cudf/fixed_point/fixed_point.hpp> #include <cudf/wrappers/timestamps.hpp> #include <math_constants.h> #include <thrust/extrema.h> namespace cudf { namespace io { /** * @brief Class used to get reference to members of unions related to statistics calculations */ class union_member { template <typename U, typename V> using reference_type = std::conditional_t<std::is_const_v<U>, V const&, V&>; public: template <typename T, typename U> using type = std::conditional_t< std::is_same_v<std::remove_cv_t<T>, string_view>, reference_type<U, string_stats>, std::conditional_t<std::is_same_v<std::remove_cv_t<T>, statistics::byte_array_view>, reference_type<U, byte_array_stats>, reference_type<U, T>>>; template <typename T, typename U> __device__ static std::enable_if_t<std::is_integral_v<T> and std::is_unsigned_v<T>, type<T, U>> get(U& val) { return val.u_val; } template <typename T, typename U> __device__ static std::enable_if_t<std::is_integral_v<T> and std::is_signed_v<T>, type<T, U>> get( U& val) { return val.i_val; } template <typename T, typename U> __device__ static std::enable_if_t<std::is_same_v<T, __int128_t>, type<T, U>> get(U& val) { return val.d128_val; } template <typename T, typename U> __device__ static std::enable_if_t<std::is_floating_point_v<T>, type<T, U>> get(U& val) { return val.fp_val; } template <typename T, typename U> __device__ static std::enable_if_t<std::is_same_v<T, string_view>, type<T, U>> get(U& val) { return val.str_val; } template <typename T, typename U> __device__ static std::enable_if_t<std::is_same_v<T, statistics::byte_array_view>, type<T, U>> get(U& val) { return val.byte_val; } }; /** * @brief Templated structure used for merging and gathering of statistics chunks * * This uses the reduce function to compute the minimum, maximum and aggregate * values simultaneously. 
* * @tparam T The input type associated with the chunk * @tparam is_aggregation_supported Set to true if input type is meant to be aggregated */ template <typename T, bool is_aggregation_supported> struct typed_statistics_chunk {}; template <typename T> struct typed_statistics_chunk<T, true> { using E = typename detail::extrema_type<T>::type; using A = typename detail::aggregation_type<T>::type; uint32_t non_nulls{0}; //!< number of non-null values in chunk uint32_t null_count{0}; //!< number of null values in chunk E minimum_value; E maximum_value; A aggregate; uint8_t has_minmax{false}; //!< Nonzero if min_value and max_values are valid uint8_t has_sum{false}; //!< Nonzero if sum is valid __device__ typed_statistics_chunk() : minimum_value(detail::minimum_identity<E>()), maximum_value(detail::maximum_identity<E>()), aggregate(0) { } __device__ void reduce(T const& elem) { non_nulls++; minimum_value = thrust::min<E>(minimum_value, detail::extrema_type<T>::convert(elem)); maximum_value = thrust::max<E>(maximum_value, detail::extrema_type<T>::convert(elem)); aggregate += detail::aggregation_type<T>::convert(elem); has_minmax = true; } __device__ void reduce(statistics_chunk const& chunk) { if (chunk.has_minmax) { minimum_value = thrust::min<E>(minimum_value, union_member::get<E>(chunk.min_value)); maximum_value = thrust::max<E>(maximum_value, union_member::get<E>(chunk.max_value)); } if (chunk.has_sum) { aggregate += union_member::get<A>(chunk.sum); } non_nulls += chunk.non_nulls; null_count += chunk.null_count; } }; template <typename T> struct typed_statistics_chunk<T, false> { using E = typename detail::extrema_type<T>::type; uint32_t non_nulls{0}; //!< number of non-null values in chunk uint32_t null_count{0}; //!< number of null values in chunk E minimum_value; E maximum_value; uint8_t has_minmax{false}; //!< Nonzero if min_value and max_values are valid uint8_t has_sum{false}; //!< Nonzero if sum is valid __device__ typed_statistics_chunk() : minimum_value(detail::minimum_identity<E>()), maximum_value(detail::maximum_identity<E>()) { } __device__ void reduce(T const& elem) { non_nulls++; minimum_value = thrust::min<E>(minimum_value, detail::extrema_type<T>::convert(elem)); maximum_value = thrust::max<E>(maximum_value, detail::extrema_type<T>::convert(elem)); has_minmax = true; } __device__ void reduce(statistics_chunk const& chunk) { if (chunk.has_minmax) { minimum_value = thrust::min<E>(minimum_value, union_member::get<E>(chunk.min_value)); maximum_value = thrust::max<E>(maximum_value, union_member::get<E>(chunk.max_value)); } non_nulls += chunk.non_nulls; null_count += chunk.null_count; } }; /** * @brief Function to reduce members of a typed_statistics_chunk across a thread block * * @tparam T Type associated with typed_statistics_chunk * @tparam block_size Dimension of the thread block * @param chunk The input typed_statistics_chunk * @param storage Temporary storage to be used by cub calls */ template <typename T, bool include_aggregate, int block_size> __inline__ __device__ typed_statistics_chunk<T, include_aggregate> block_reduce( typed_statistics_chunk<T, include_aggregate>& chunk, detail::storage_wrapper<block_size>& storage) { typed_statistics_chunk<T, include_aggregate> output_chunk = chunk; using E = typename detail::extrema_type<T>::type; using extrema_reduce = cub::BlockReduce<E, block_size>; using count_reduce = cub::BlockReduce<uint32_t, block_size>; output_chunk.minimum_value = extrema_reduce(storage.template get<E>()).Reduce(output_chunk.minimum_value, cub::Min()); 
__syncthreads(); output_chunk.maximum_value = extrema_reduce(storage.template get<E>()).Reduce(output_chunk.maximum_value, cub::Max()); __syncthreads(); output_chunk.non_nulls = count_reduce(storage.template get<uint32_t>()).Sum(output_chunk.non_nulls); __syncthreads(); output_chunk.null_count = count_reduce(storage.template get<uint32_t>()).Sum(output_chunk.null_count); __syncthreads(); output_chunk.has_minmax = __syncthreads_or(output_chunk.has_minmax); // FIXME : Is another syncthreads needed here? if constexpr (include_aggregate) { if (output_chunk.has_minmax) { using A = typename detail::aggregation_type<T>::type; using aggregate_reduce = cub::BlockReduce<A, block_size>; output_chunk.aggregate = aggregate_reduce(storage.template get<A>()).Sum(output_chunk.aggregate); } } return output_chunk; } /** * @brief Function to convert typed_statistics_chunk into statistics_chunk * * @tparam T Type associated with typed_statistics_chunk * @param chunk The input typed_statistics_chunk */ template <typename T, bool include_aggregate> __inline__ __device__ statistics_chunk get_untyped_chunk(typed_statistics_chunk<T, include_aggregate> const& chunk) { using E = typename detail::extrema_type<T>::type; statistics_chunk stat{}; stat.non_nulls = chunk.non_nulls; stat.null_count = chunk.null_count; stat.has_minmax = chunk.has_minmax; stat.has_sum = [&]() { // invalidate the sum if overflow or underflow is possible if constexpr (std::is_floating_point_v<E> or std::is_integral_v<E>) { if (!chunk.has_minmax) { return true; } return std::numeric_limits<E>::max() / chunk.non_nulls >= static_cast<E>(chunk.maximum_value) and std::numeric_limits<E>::lowest() / chunk.non_nulls <= static_cast<E>(chunk.minimum_value); } return true; }(); if (chunk.has_minmax) { if constexpr (std::is_floating_point_v<E>) { union_member::get<E>(stat.min_value) = (chunk.minimum_value != 0.0) ? chunk.minimum_value : CUDART_NEG_ZERO; union_member::get<E>(stat.max_value) = (chunk.maximum_value != 0.0) ? chunk.maximum_value : CUDART_ZERO; } else { union_member::get<E>(stat.min_value) = chunk.minimum_value; union_member::get<E>(stat.max_value) = chunk.maximum_value; } if constexpr (include_aggregate) { using A = typename detail::aggregation_type<T>::type; union_member::get<A>(stat.sum) = chunk.aggregate; } } return stat; } } // namespace io } // namespace cudf
0
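A host-side analogue of the chunk reduction above, assuming nothing from cudf; mini_chunk is a simplified stand-in that shows why the extrema start at their identity values: an empty or all-null chunk then merges as a no-op on min/max while still contributing its counts.

#include <algorithm>
#include <cstdint>
#include <limits>

struct mini_chunk {
  uint32_t non_nulls{0};
  uint32_t null_count{0};
  int64_t minimum_value{std::numeric_limits<int64_t>::max()};     // min identity
  int64_t maximum_value{std::numeric_limits<int64_t>::lowest()};  // max identity
  int64_t aggregate{0};
  bool has_minmax{false};

  void reduce(int64_t elem)  // fold one element
  {
    ++non_nulls;
    minimum_value = std::min(minimum_value, elem);
    maximum_value = std::max(maximum_value, elem);
    aggregate += elem;
    has_minmax = true;
  }

  void reduce(mini_chunk const& other)  // merge an already-reduced chunk
  {
    if (other.has_minmax) {
      minimum_value = std::min(minimum_value, other.minimum_value);
      maximum_value = std::max(maximum_value, other.maximum_value);
      has_minmax    = true;
    }
    aggregate += other.aggregate;
    non_nulls += other.non_nulls;
    null_count += other.null_count;
  }
};

int main()
{
  mini_chunk a;
  a.reduce(int64_t{5});
  a.reduce(int64_t{-2});
  mini_chunk all_null;
  all_null.null_count = 3;  // has_minmax stays false
  a.reduce(all_null);       // merging it leaves a's extrema untouched
  return (a.minimum_value == -2 && a.maximum_value == 5 && a.null_count == 3) ? 0 : 1;
}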
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/statistics/byte_array_view.cuh
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cudf/utilities/span.hpp> namespace cudf::io::statistics { /** * @brief Wrapper for a row of a list<int8> or list<uint8> column. This is analogous to * `string_view` in type. It was created due to the need for comparison operators for cub reduce on * statistics. Otherwise, it is a device_span in all but name. * */ class byte_array_view { public: using element_type = std::byte const; ///< The type of the elements in the byte array constexpr byte_array_view() noexcept {} /** * @brief Constructs a byte_array_view from a pointer and a size. * * @param data Pointer to the first element in the byte array. * @param size The number of elements in the byte array. */ constexpr byte_array_view(element_type* data, std::size_t size) : _data(data, size) {} constexpr byte_array_view(byte_array_view const&) noexcept = default; ///< Copy constructor /** * @brief Copy assignment operator. * * @return Reference to this byte_array_view. */ constexpr byte_array_view& operator=(byte_array_view const&) noexcept = default; /** * @brief Returns a reference to the idx-th element of the byte_array_view. * * The behavior is undefined if idx is out of range (i.e., if it is greater than or equal to * size()). * * @param idx The index of the element to access. * @return A reference to the idx-th element of the byte_array_view, i.e., `_data.data()[idx]`. */ [[nodiscard]] constexpr element_type& operator[](std::size_t idx) const { return _data[idx]; } /** * @brief Returns a pointer to the beginning of the byte_array_view. * * @return A pointer to the first element of the byte_array_view. */ [[nodiscard]] constexpr element_type* data() const noexcept { return _data.data(); } /** * @brief Returns the number of elements in the byte_array_view. * * @return The number of elements in the byte_array_view. */ [[nodiscard]] constexpr std::size_t size() const noexcept { return _data.size(); } /** * @brief Returns the size of the byte_array_view in bytes. * * @return The size of the byte_array_view in bytes */ [[nodiscard]] constexpr std::size_t size_bytes() const noexcept { return _data.size_bytes(); } /** * @brief Comparing target byte_array_view with this byte_array_view. Each byte in the array is * compared. * * @param byte_array_view Target byte_array_view to compare with this byte_array_view. * @return 0 If they compare equal. * <0 Either the value of the first byte of this byte_array_view that does not match is * lower in the arg byte_array_view, or all compared bytes match but the arg byte_array_view is * shorter. >0 Either the value of the first byte of this byte_array_view that does not match is * greater in the arg byte_array_view, or all compared bytes match but the arg byte_array_view is * longer. 
*/ [[nodiscard]] __device__ inline int32_t compare(byte_array_view const& rhs) const { auto const len1 = size_bytes(); auto const len2 = rhs.size_bytes(); auto const* ptr1 = this->data(); auto const* ptr2 = rhs.data(); if ((ptr1 == ptr2) && (len1 == len2)) { return 0; } // if I am max, I am greater than the argument if (ptr1 == nullptr && len1 == std::numeric_limits<std::size_t>::max()) { return 1; } // if the argument is max, it is greater than me if (ptr2 == nullptr && len2 == std::numeric_limits<std::size_t>::max()) { return -1; } std::size_t idx = 0; for (; (idx < len1) && (idx < len2); ++idx) { if (ptr1[idx] != ptr2[idx]) { return static_cast<int32_t>(ptr1[idx]) - static_cast<int32_t>(ptr2[idx]); } } // if the argument ran out of data, it is less than me if (idx < len1) return 1; // if I ran out of data first, I am less than the argument if (idx < len2) return -1; return 0; } /** * @brief Returns true if this byte_array_view is ordered before rhs. * * @param rhs Target byte_array_view to compare with this byte_array_view. * @return true if this byte_array_view is ordered before rhs */ [[nodiscard]] __device__ inline bool operator<(byte_array_view const& rhs) const { return compare(rhs) < 0; } /** * @brief Returns true if rhs is ordered before this byte_array_view. * * @param rhs Target byte_array_view to compare with this byte_array_view. * @return true if rhs is ordered before this byte_array_view */ [[nodiscard]] __device__ inline bool operator>(byte_array_view const& rhs) const { return compare(rhs) > 0; } /** * @brief Returns true if this byte_array_view is ordered before rhs. * * @param rhs Target byte_array_view to compare with this byte_array_view. * @return true if this byte_array_view is ordered before rhs */ [[nodiscard]] __device__ inline bool operator<=(byte_array_view const& rhs) const { return compare(rhs) <= 0; } /** * @brief Returns true if rhs is ordered before this byte_array_view. * * @param rhs Target byte_array_view to compare with this byte_array_view. * @return true if rhs is ordered before this byte_array_view */ [[nodiscard]] __device__ inline bool operator>=(byte_array_view const& rhs) const { return compare(rhs) >= 0; } /** * @brief Return minimum value associated with the byte_array_view type * * @return An empty byte_array_view */ [[nodiscard]] __device__ inline static byte_array_view min() { return byte_array_view(); } /** * @brief Return a byte_array_view to interpret as maximum value * * @return A byte_array_view value which represents the largest possible byte_array_view */ [[nodiscard]] __device__ inline static byte_array_view max() { return byte_array_view(nullptr, std::numeric_limits<std::size_t>::max()); } private: device_span<element_type> _data{}; }; } // namespace cudf::io::statistics
0
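A host-side sketch of the compare semantics above (the real member is __device__; compare_bytes is a hypothetical stand-in): bytewise three-way comparison, ties broken by length, with {nullptr, SIZE_MAX} acting as the sentinel maximum returned by byte_array_view::max().

#include <cstddef>
#include <limits>

int compare_bytes(std::byte const* p1, std::size_t n1,
                  std::byte const* p2, std::size_t n2)
{
  if (p1 == p2 && n1 == n2) { return 0; }
  auto constexpr sentinel = std::numeric_limits<std::size_t>::max();
  if (p1 == nullptr && n1 == sentinel) { return 1; }   // lhs is the maximum
  if (p2 == nullptr && n2 == sentinel) { return -1; }  // rhs is the maximum
  std::size_t i = 0;
  for (; i < n1 && i < n2; ++i) {
    if (p1[i] != p2[i]) {
      return static_cast<int>(p1[i]) - static_cast<int>(p2[i]);
    }
  }
  if (i < n1) { return 1; }   // rhs is a strict prefix of lhs
  if (i < n2) { return -1; }  // lhs is a strict prefix of rhs
  return 0;
}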
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/utilities/file_io_utilities.hpp
/* * Copyright (c) 2021-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #ifdef CUFILE_FOUND #include "thread_pool.hpp" #include <cudf_test/file_utilities.hpp> #include <cufile.h> #endif #include <rmm/cuda_stream_view.hpp> #include <cudf/io/datasource.hpp> #include <cudf/utilities/error.hpp> #include <string> namespace cudf { namespace io { namespace detail { /** * @brief Class that provides RAII for file handling. */ class file_wrapper { int fd = -1; size_t _size; public: explicit file_wrapper(std::string const& filepath, int flags); explicit file_wrapper(std::string const& filepath, int flags, mode_t mode); ~file_wrapper(); [[nodiscard]] auto size() const { return _size; } [[nodiscard]] auto desc() const { return fd; } }; /** * @brief Interface class for cufile input. */ class cufile_input { public: /** * @brief Asynchronously reads into existing device memory. * * @throws cudf::logic_error on cuFile error * * @param offset Number of bytes from the start * @param size Number of bytes to read * @param dst Address of the existing device memory * @param stream CUDA stream to use * * @return The number of bytes read as an std::future */ virtual std::future<size_t> read_async(size_t offset, size_t size, uint8_t* dst, rmm::cuda_stream_view stream) = 0; }; /** * @brief Interface class for cufile output. */ class cufile_output { public: /** * @brief Asynchronously writes the data from a device buffer into a file. * * It is the caller's responsibility to not invalidate `data` until the result from this function * is synchronized. * * @throws cudf::logic_error on cuFile error * * @param data Pointer to the buffer to be written into the output file * @param offset Number of bytes from the start * @param size Number of bytes to write */ virtual std::future<void> write_async(void const* data, size_t offset, size_t size) = 0; }; #ifdef CUFILE_FOUND class cufile_shim; /** * @brief Class that provides RAII for cuFile file registration. */ struct cufile_registered_file { void register_handle(); public: cufile_registered_file(cufile_shim const* shim, std::string const& filepath, int flags) : _file(filepath, flags), shim{shim} { register_handle(); } cufile_registered_file(cufile_shim const* shim, std::string const& filepath, int flags, mode_t mode) : _file(filepath, flags, mode), shim{shim} { register_handle(); } [[nodiscard]] auto const& handle() const noexcept { return cf_handle; } ~cufile_registered_file(); private: file_wrapper const _file; CUfileHandle_t cf_handle = nullptr; cufile_shim const* shim = nullptr; }; /** * @brief Adapter for the `cuFileRead` API. * * Exposes APIs to read directly from a file into device memory. 
*/ class cufile_input_impl final : public cufile_input { public: cufile_input_impl(std::string const& filepath); std::future<size_t> read_async(size_t offset, size_t size, uint8_t* dst, rmm::cuda_stream_view stream) override; private: cufile_shim const* shim = nullptr; cufile_registered_file const cf_file; cudf::detail::thread_pool pool; }; /** * @brief Adapter for the `cuFileWrite` API. * * Exposes an API to write directly into a file from device memory. */ class cufile_output_impl final : public cufile_output { public: cufile_output_impl(std::string const& filepath); std::future<void> write_async(void const* data, size_t offset, size_t size) override; private: cufile_shim const* shim = nullptr; cufile_registered_file const cf_file; cudf::detail::thread_pool pool; }; #else class cufile_input_impl final : public cufile_input { public: cufile_input_impl(std::string const& filepath); std::future<size_t> read_async(size_t offset, size_t size, uint8_t* dst, rmm::cuda_stream_view stream) override { CUDF_FAIL("Only used to compile without cufile library, should not be called"); } }; class cufile_output_impl final : public cufile_output { public: cufile_output_impl(std::string const& filepath); std::future<void> write_async(void const* data, size_t offset, size_t size) override { CUDF_FAIL("Only used to compile without cufile library, should not be called"); } }; #endif /** * @brief Creates a `cufile_input_impl` object * * Returns a null pointer if an exception occurs in the `cufile_input_impl` constructor, or if the * cuFile library is not installed. */ std::unique_ptr<cufile_input_impl> make_cufile_input(std::string const& filepath); /** * @brief Creates a `cufile_output_impl` object * * Returns a null pointer if an exception occurs in the `cufile_output_impl` constructor, or if the * cuFile library is not installed. */ std::unique_ptr<cufile_output_impl> make_cufile_output(std::string const& filepath); /** * @brief Byte range to be read/written in a single operation. */ struct file_io_slice { size_t offset; size_t size; }; /** * @brief Split the total number of bytes to read/write into slices to enable parallel IO. * * If `max_slice_size` is below 1024, 1024 will be used instead to prevent potential misuse. */ std::vector<file_io_slice> make_file_io_slices(size_t size, size_t max_slice_size); } // namespace detail } // namespace io } // namespace cudf
0
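A sketch of the slicing behavior documented for make_file_io_slices, derived only from the comment above (make_slices is a hypothetical stand-in, not the cudf implementation): the byte range is cut into at-most-max_slice_size pieces, with the slice size clamped to a 1024-byte floor.

#include <algorithm>
#include <cstddef>
#include <vector>

struct file_io_slice { std::size_t offset; std::size_t size; };

std::vector<file_io_slice> make_slices(std::size_t size, std::size_t max_slice_size)
{
  max_slice_size = std::max<std::size_t>(max_slice_size, 1024);  // documented floor
  std::vector<file_io_slice> slices;
  for (std::size_t offset = 0; offset < size; offset += max_slice_size) {
    slices.push_back({offset, std::min(max_slice_size, size - offset)});
  }
  return slices;
}

int main()
{
  auto const slices = make_slices(10'000, 4'096);
  return slices.size() == 3 ? 0 : 1;  // 4096 + 4096 + 1808
}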
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/utilities/block_utils.cuh
/* * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <cstdint> namespace cudf { namespace io { template <typename T> inline __device__ T shuffle(T var, int lane = 0) { return __shfl_sync(~0, var, lane); } template <typename T> inline __device__ T shuffle_xor(T var, uint32_t delta) { return __shfl_xor_sync(~0, var, delta); } inline __device__ void syncwarp() { __syncwarp(); } inline __device__ uint32_t ballot(int pred) { return __ballot_sync(~0, pred); } // Warp reduction helpers template <typename T> inline __device__ T WarpReduceOr2(T acc) { return acc | shuffle_xor(acc, 1); } template <typename T> inline __device__ T WarpReduceOr4(T acc) { acc = WarpReduceOr2(acc); return acc | shuffle_xor(acc, 2); } template <typename T> inline __device__ T WarpReduceOr8(T acc) { acc = WarpReduceOr4(acc); return acc | shuffle_xor(acc, 4); } template <typename T> inline __device__ T WarpReduceOr16(T acc) { acc = WarpReduceOr8(acc); return acc | shuffle_xor(acc, 8); } template <typename T> inline __device__ T WarpReduceOr32(T acc) { acc = WarpReduceOr16(acc); return acc | shuffle_xor(acc, 16); } template <typename T> inline __device__ T WarpReducePos2(T pos, uint32_t t) { T tmp = shuffle(pos, t & 0x1e); pos += (t & 1) ? tmp : 0; return pos; } template <typename T> inline __device__ T WarpReducePos4(T pos, uint32_t t) { T tmp; pos = WarpReducePos2(pos, t); tmp = shuffle(pos, (t & 0x1c) | 1); pos += (t & 2) ? tmp : 0; return pos; } template <typename T> inline __device__ T WarpReducePos8(T pos, uint32_t t) { T tmp; pos = WarpReducePos4(pos, t); tmp = shuffle(pos, (t & 0x18) | 3); pos += (t & 4) ? tmp : 0; return pos; } template <typename T> inline __device__ T WarpReducePos16(T pos, uint32_t t) { T tmp; pos = WarpReducePos8(pos, t); tmp = shuffle(pos, (t & 0x10) | 7); pos += (t & 8) ? tmp : 0; return pos; } template <typename T> inline __device__ T WarpReducePos32(T pos, uint32_t t) { T tmp; pos = WarpReducePos16(pos, t); tmp = shuffle(pos, 0xf); pos += (t & 16) ? tmp : 0; return pos; } inline __device__ double Int128ToDouble_rn(uint64_t lo, int64_t hi) { double sign; if (hi < 0) { sign = -1.0; lo = (~lo) + 1; hi = (~hi) + (lo == 0); } else { sign = 1.0; } return sign * __fma_rn(__ll2double_rn(hi), 4294967296.0 * 4294967296.0, __ull2double_rn(lo)); } inline __device__ uint32_t unaligned_load32(uint8_t const* p) { uint32_t ofs = 3 & reinterpret_cast<uintptr_t>(p); auto const* p32 = reinterpret_cast<uint32_t const*>(p - ofs); uint32_t v = p32[0]; return (ofs) ? 
__funnelshift_r(v, p32[1], ofs * 8) : v; } inline __device__ uint64_t unaligned_load64(uint8_t const* p) { uint32_t ofs = 3 & reinterpret_cast<uintptr_t>(p); auto const* p32 = reinterpret_cast<uint32_t const*>(p - ofs); uint32_t v0 = p32[0]; uint32_t v1 = p32[1]; if (ofs) { v0 = __funnelshift_r(v0, v1, ofs * 8); v1 = __funnelshift_r(v1, p32[2], ofs * 8); } return (((uint64_t)v1) << 32) | v0; } template <unsigned int nthreads, bool sync_before_store> inline __device__ void memcpy_block(void* dstv, void const* srcv, uint32_t len, uint32_t t) { auto* dst = static_cast<uint8_t*>(dstv); auto const* src = static_cast<uint8_t const*>(srcv); uint32_t dst_align_bytes, src_align_bytes, src_align_bits; // Align output to 32-bit dst_align_bytes = 3 & -reinterpret_cast<intptr_t>(dst); if (dst_align_bytes != 0) { uint32_t align_len = min(dst_align_bytes, len); uint8_t b; if (t < align_len) { b = src[t]; } if (sync_before_store) { __syncthreads(); } if (t < align_len) { dst[t] = b; } src += align_len; dst += align_len; len -= align_len; } src_align_bytes = (uint32_t)(3 & reinterpret_cast<uintptr_t>(src)); src_align_bits = src_align_bytes * 8; while (len >= 4) { auto const* src32 = reinterpret_cast<uint32_t const*>(src - src_align_bytes); uint32_t copy_cnt = min(len >> 2, nthreads); uint32_t v; if (t < copy_cnt) { v = src32[t]; if (src_align_bits != 0) { v = __funnelshift_r(v, src32[t + 1], src_align_bits); } } if (sync_before_store) { __syncthreads(); } if (t < copy_cnt) { reinterpret_cast<uint32_t*>(dst)[t] = v; } src += copy_cnt * 4; dst += copy_cnt * 4; len -= copy_cnt * 4; } if (len != 0) { uint8_t b; if (t < len) { b = src[t]; } if (sync_before_store) { __syncthreads(); } if (t < len) { dst[t] = b; } } } } // namespace io } // namespace cudf
0
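A host model of what the WarpReducePosN ladder above computes: after the shuffle/add steps, lane t holds the inclusive prefix sum of lanes 0..t. warp_reduce_pos_model is illustrative only; it replays the same "read lane (t & ~(2*step - 1)) | (step - 1), add when bit `step` is set" arithmetic over a plain array standing in for a warp.

#include <array>
#include <cstdint>

template <std::size_t N>  // N must be a power of two, e.g. 32 for a full warp
std::array<uint32_t, N> warp_reduce_pos_model(std::array<uint32_t, N> lanes)
{
  for (std::size_t step = 1; step < N; step <<= 1) {
    auto const snapshot = lanes;  // every lane reads pre-step values, as shuffle does
    for (std::size_t t = 0; t < N; ++t) {
      std::size_t const src = (t & ~(2 * step - 1)) | (step - 1);
      if (t & step) { lanes[t] += snapshot[src]; }
    }
  }
  return lanes;  // lanes[t] == inclusive prefix sum of inputs 0..t
}

int main()
{
  auto const out = warp_reduce_pos_model<4>({1, 2, 3, 4});
  return (out[2] == 6 && out[3] == 10) ? 0 : 1;
}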
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/utilities/parsing_utils.cuh
/* * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <io/csv/datetime.cuh> #include <io/utilities/trie.cuh> #include <cudf/io/types.hpp> #include <cudf/lists/list_view.hpp> #include <cudf/strings/detail/convert/fixed_point.cuh> #include <cudf/strings/string_view.cuh> #include <cudf/structs/struct_view.hpp> #include <cudf/utilities/span.hpp> #include <cudf/utilities/traits.hpp> #include "column_type_histogram.hpp" #include <rmm/device_uvector.hpp> #include <thrust/execution_policy.h> #include <thrust/iterator/reverse_iterator.h> #include <thrust/mismatch.h> #include <optional> using cudf::device_span; namespace cudf { namespace io { /** * @brief Non-owning view for json type inference options */ struct json_inference_options_view { char quote_char; cudf::detail::trie_view trie_true; cudf::detail::trie_view trie_false; cudf::detail::trie_view trie_na; }; /** * @brief Structure for holding various options used when parsing and * converting CSV/json data to cuDF data type values. */ struct parse_options_view { char delimiter; char terminator; char quotechar; char decimal; char thousands; char comment; bool keepquotes; bool doublequote; bool dayfirst; bool skipblanklines; cudf::detail::trie_view trie_true; cudf::detail::trie_view trie_false; cudf::detail::trie_view trie_na; bool multi_delimiter; }; struct parse_options { char delimiter; char terminator; char quotechar; char decimal; char thousands; char comment; bool keepquotes; bool doublequote; bool dayfirst; bool skipblanklines; cudf::detail::optional_trie trie_true; cudf::detail::optional_trie trie_false; cudf::detail::optional_trie trie_na; bool multi_delimiter; [[nodiscard]] json_inference_options_view json_view() const { return {quotechar, cudf::detail::make_trie_view(trie_true), cudf::detail::make_trie_view(trie_false), cudf::detail::make_trie_view(trie_na)}; } [[nodiscard]] parse_options_view view() const { return {delimiter, terminator, quotechar, decimal, thousands, comment, keepquotes, doublequote, dayfirst, skipblanklines, cudf::detail::make_trie_view(trie_true), cudf::detail::make_trie_view(trie_false), cudf::detail::make_trie_view(trie_na), multi_delimiter}; } }; /** * @brief Returns the escaped characters for a given character. * * @param escaped_char The character to escape. * @return The escaped characters for a given character. */ __device__ __forceinline__ thrust::pair<char, char> get_escaped_char(char escaped_char) { switch (escaped_char) { case '"': return {'\\', '"'}; case '\\': return {'\\', '\\'}; case '/': return {'\\', '/'}; case '\b': return {'\\', 'b'}; case '\f': return {'\\', 'f'}; case '\n': return {'\\', 'n'}; case '\r': return {'\\', 'r'}; case '\t': return {'\\', 't'}; // case 'u': return UNICODE_SEQ; default: return {'\0', escaped_char}; } } /** * @brief Returns the numeric value of an ASCII/UTF-8 character. * Handles hexadecimal digits, both uppercase and lowercase * for integral types and only decimal digits for floating point types. 
* If the character is not a valid numeric digit then `0` is returned and * valid_flag is set to false. * * @param c ASCII or UTF-8 character * @param valid_flag Set to false if input is not valid. Unchanged otherwise. * * @return uint8_t Numeric value of the character, or `0` */ template <typename T, bool as_hex = false> constexpr uint8_t decode_digit(char c, bool* valid_flag) { if (c >= '0' && c <= '9') return c - '0'; if constexpr (as_hex and std::is_integral_v<T>) { if (c >= 'a' && c <= 'f') return c - 'a' + 10; if (c >= 'A' && c <= 'F') return c - 'A' + 10; } *valid_flag = false; return 0; } // Converts character to lowercase. constexpr char to_lower(char const c) { return c >= 'A' && c <= 'Z' ? c + ('a' - 'A') : c; } /** * @brief Checks if string is infinity, case insensitive with/without sign * Valid infinity strings are inf, +inf, -inf, infinity, +infinity, -infinity * String comparison is case insensitive. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @return true if string is valid infinity, else false. */ constexpr bool is_infinity(char const* begin, char const* end) { if (*begin == '-' || *begin == '+') begin++; char const* cinf = "infinity"; auto index = begin; while (index < end) { if (*cinf != to_lower(*index)) break; index++; cinf++; } return ((index == begin + 3 || index == begin + 8) && index >= end); } /** * @brief Parses a character string and returns its numeric value. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param opts The global parsing behavior options * @param error_result Value to return on parse error * @tparam base Base (radix) to use for conversion * * @return The parsed and converted value */ template <typename T, int base = 10> __host__ __device__ std::optional<T> parse_numeric(char const* begin, char const* end, parse_options_view const& opts) { T value{}; bool all_digits_valid = true; constexpr bool as_hex = (base == 16); // Handle negative values if necessary int32_t sign = (*begin == '-') ? -1 : 1; // Handle infinity if (std::is_floating_point_v<T> && is_infinity(begin, end)) { return sign * std::numeric_limits<T>::infinity(); } if (*begin == '-' || *begin == '+') begin++; // Skip over the "0x" prefix for hex notation if (base == 16 && begin + 2 < end && *begin == '0' && *(begin + 1) == 'x') { begin += 2; } // Handle the whole part of the number // auto index = begin; while (begin < end) { if (*begin == opts.decimal) { ++begin; break; } else if (base == 10 && (*begin == 'e' || *begin == 'E')) { break; } else if (*begin != opts.thousands && *begin != '+') { value = (value * base) + decode_digit<T, as_hex>(*begin, &all_digits_valid); } ++begin; } if (std::is_floating_point_v<T>) { // Handle fractional part of the number if necessary double divisor = 1; while (begin < end) { if (*begin == 'e' || *begin == 'E') { ++begin; break; } else if (*begin != opts.thousands && *begin != '+') { divisor /= base; value += decode_digit<T, as_hex>(*begin, &all_digits_valid) * divisor; } ++begin; } // Handle exponential part of the number if necessary if (begin < end) { int32_t const exponent_sign = *begin == '-' ? 
-1 : 1; if (*begin == '-' || *begin == '+') { ++begin; } int32_t exponent = 0; while (begin < end) { exponent = (exponent * 10) + decode_digit<T, as_hex>(*(begin++), &all_digits_valid); } if (exponent != 0) { value *= exp10(double(exponent * exponent_sign)); } } } if (!all_digits_valid) { return std::optional<T>{}; } return value * sign; } namespace gpu { /** * @brief CUDA kernel iterates over the data until the end of the current field * * Also iterates over (one or more) delimiter characters after the field. * Function applies to formats with field delimiters and line terminators. * * @param begin Pointer to the first element of the string * @param end Pointer to the first element after the string * @param opts A set of parsing options * @param escape_char A boolean value to signify whether to consider `\` as escape character or * just a character. * * @return Pointer to the last character in the field, including the * delimiter(s) following the field data */ __device__ __inline__ char const* seek_field_end(char const* begin, char const* end, parse_options_view const& opts, bool escape_char = false) { bool quotation = false; auto current = begin; bool escape_next = false; while (current < end) { // Use simple logic to ignore control chars between any quote seq // Handles nominal cases including doublequotes within quotes, but // may not output exact failures as PANDAS for malformed fields. // Check for instances such as "a2\"bc" and "\\" if `escape_char` is true. if (*current == opts.quotechar and not escape_next) { quotation = !quotation; } else if (!quotation) { if (*current == opts.delimiter) { while (opts.multi_delimiter && (current + 1 < end) && *(current + 1) == opts.delimiter) { ++current; } break; } else if (*current == opts.terminator) { break; } else if (*current == '\r' && (current + 1 < end && *(current + 1) == '\n')) { --end; break; } } if (escape_char) { // If a escape character is encountered, escape next character in next loop. if (not escape_next and *current == '\\') { escape_next = true; } else { escape_next = false; } } if (current < end) { current++; } } return current; } /** * @brief Lexicographically compare digits in input against string * representing an integer * * @param raw_data The pointer to beginning of character string * @param golden The pointer to beginning of character string representing * the value to be compared against * @return bool True if integer represented by character string is less * than or equal to golden data */ template <int N> __device__ __inline__ bool less_equal_than(char const* data, char const (&golden)[N]) { auto mismatch_pair = thrust::mismatch(thrust::seq, data, data + N - 1, golden); if (mismatch_pair.first != data + N - 1) { return *mismatch_pair.first <= *mismatch_pair.second; } else { // Exact match return true; } } /** * @brief Determine which counter to increment when a sequence of digits * and a parity sign is encountered. 
* * @param raw_data The pointer to beginning of character string * @param digit_count Total number of digits * @param stats Reference to structure with counters * @return Pointer to appropriate counter that belong to * the interpreted data type */ __device__ __inline__ cudf::size_type* infer_integral_field_counter(char const* data_begin, char const* data_end, bool is_negative, column_type_histogram& stats) { static constexpr char uint64_max_abs[] = "18446744073709551615"; static constexpr char int64_min_abs[] = "9223372036854775808"; static constexpr char int64_max_abs[] = "9223372036854775807"; auto digit_count = data_end - data_begin; // Remove preceding zeros if (digit_count >= (sizeof(int64_max_abs) - 1)) { // Trim zeros at the beginning of raw_data while (*data_begin == '0' && (data_begin < data_end)) { data_begin++; } } digit_count = data_end - data_begin; // After trimming the number of digits could be less than maximum // int64 digit count if (digit_count < (sizeof(int64_max_abs) - 1)) { // CASE 0 : Accept validity // If the length of the string representing the integer is smaller // than string length of Int64Max then count this as an integer // representable by int64 // If digit_count is 0 then ignore - sign, i.e. -000..00 should // be treated as a positive small integer return is_negative && (digit_count != 0) ? &stats.negative_small_int_count : &stats.positive_small_int_count; } else if (digit_count > (sizeof(uint64_max_abs) - 1)) { // CASE 1 : Reject validity // If the length of the string representing the integer is greater // than string length of UInt64Max then count this as a string // since it cannot be represented as an int64 or uint64 return &stats.string_count; } else if (digit_count == (sizeof(uint64_max_abs) - 1) && is_negative) { // A negative integer of length UInt64Max digit count cannot be represented // as a 64 bit integer return &stats.string_count; } if (digit_count == (sizeof(int64_max_abs) - 1) && is_negative) { return less_equal_than(data_begin, int64_min_abs) ? &stats.negative_small_int_count : &stats.string_count; } else if (digit_count == (sizeof(int64_max_abs) - 1) && !is_negative) { return less_equal_than(data_begin, int64_max_abs) ? &stats.positive_small_int_count : &stats.big_int_count; } else if (digit_count == (sizeof(uint64_max_abs) - 1)) { return less_equal_than(data_begin, uint64_max_abs) ? &stats.big_int_count : &stats.string_count; } return &stats.string_count; } } // namespace gpu /** * @brief Searches the input character array for each of characters in a set. * Sums up the number of occurrences. If the 'positions' parameter is not void*, * positions of all occurrences are stored in the output device array. * * @param[in] d_data Input character array in device memory * @param[in] keys Vector containing the keys to count in the buffer * @param[in] result_offset Offset to add to the output positions * @param[out] positions Array containing the output positions * @param[in] stream CUDA stream used for device memory operations and kernel launches * * @return cudf::size_type total number of occurrences */ template <class T> cudf::size_type find_all_from_set(device_span<char const> data, std::vector<char> const& keys, uint64_t result_offset, T* positions, rmm::cuda_stream_view stream); /** * @brief Searches the input character array for each of characters in a set. * Sums up the number of occurrences. If the 'positions' parameter is not void*, * positions of all occurrences are stored in the output device array. 
* * Does not load the entire file into the GPU memory at any time, so it can * be used to parse large files. Output array needs to be preallocated. * * @param[in] h_data Pointer to the input character array * @param[in] h_size Number of bytes in the input array * @param[in] keys Vector containing the keys to count in the buffer * @param[in] result_offset Offset to add to the output positions * @param[out] positions Array containing the output positions * @param[in] stream CUDA stream used for device memory operations and kernel launches * * @return cudf::size_type total number of occurrences */ template <class T> cudf::size_type find_all_from_set(host_span<char const> data, std::vector<char> const& keys, uint64_t result_offset, T* positions, rmm::cuda_stream_view stream); /** * @brief Searches the input character array for each of characters in a set * and sums up the number of occurrences. * * @param d_data Input data buffer in device memory * @param keys Vector containing the keys to count in the buffer * @param stream CUDA stream used for device memory operations and kernel launches * * @return cudf::size_type total number of occurrences */ cudf::size_type count_all_from_set(device_span<char const> data, std::vector<char> const& keys, rmm::cuda_stream_view stream); /** * @brief Searches the input character array for each of characters in a set * and sums up the number of occurrences. * * Does not load the entire buffer into the GPU memory at any time, so it can * be used with buffers of any size. * * @param h_data Pointer to the data in host memory * @param h_size Size of the input data, in bytes * @param keys Vector containing the keys to count in the buffer * @param stream CUDA stream used for device memory operations and kernel launches * * @return cudf::size_type total number of occurrences */ cudf::size_type count_all_from_set(host_span<char const> data, std::vector<char> const& keys, rmm::cuda_stream_view stream); /** * @brief Checks whether the given character is a whitespace character. * * @param ch The character to check * * @return True if the input is whitespace, False otherwise */ __inline__ __device__ bool is_whitespace(char ch) { return ch == '\t' || ch == ' '; } /** * @brief Skips past the current character if it matches the given value. */ template <typename It> __inline__ __device__ It skip_character(It const& it, char ch) { return it + (*it == ch); } /** * @brief Adjusts the range to ignore starting/trailing whitespace and quotation characters. * * @param begin Pointer to the first character in the parsing range * @param end Pointer to the first character after the parsing range * @param quotechar The character used to denote quotes; '\0' if none * * @return Trimmed range */ __inline__ __device__ std::pair<char const*, char const*> trim_whitespaces_quotes( char const* begin, char const* end, char quotechar = '\0') { auto not_whitespace = [] __device__(auto c) { return !is_whitespace(c); }; auto const trim_begin = thrust::find_if(thrust::seq, begin, end, not_whitespace); auto const trim_end = thrust::find_if(thrust::seq, thrust::make_reverse_iterator(end), thrust::make_reverse_iterator(trim_begin), not_whitespace); return {skip_character(trim_begin, quotechar), skip_character(trim_end, quotechar).base()}; } /** * @brief Adjusts the range to ignore starting/trailing whitespace characters. 
* * @param begin Pointer to the first character in the parsing range * @param end Pointer to the first character after the parsing range * * @return Trimmed range */ __inline__ __device__ std::pair<char const*, char const*> trim_whitespaces(char const* begin, char const* end) { auto not_whitespace = [] __device__(auto c) { return !is_whitespace(c); }; auto const trim_begin = thrust::find_if(thrust::seq, begin, end, not_whitespace); auto const trim_end = thrust::find_if(thrust::seq, thrust::make_reverse_iterator(end), thrust::make_reverse_iterator(trim_begin), not_whitespace); return {trim_begin, trim_end.base()}; } /** * @brief Adjusts the range to ignore starting/trailing quotation characters. * * @param begin Pointer to the first character in the parsing range * @param end Pointer to the first character after the parsing range * @param quotechar The character used to denote quotes. Provide '\0' if no quotes should be * trimmed. * * @return Trimmed range */ __inline__ __device__ std::pair<char const*, char const*> trim_quotes(char const* begin, char const* end, char quotechar) { if ((thrust::distance(begin, end) >= 2 && *begin == quotechar && *thrust::prev(end) == quotechar)) { thrust::advance(begin, 1); thrust::advance(end, -1); } return {begin, end}; } struct ConvertFunctor { /** * @brief Dispatch for numeric types whose values can be convertible to * 0 or 1 to represent boolean false/true, based upon checking against a * true/false values list. * * @return bool Whether the parsed value is valid. */ template <typename T, CUDF_ENABLE_IF(std::is_integral_v<T> and !std::is_same_v<T, bool> and !cudf::is_fixed_point<T>())> __host__ __device__ __forceinline__ bool operator()(char const* begin, char const* end, void* out_buffer, size_t row, data_type const output_type, parse_options_view const& opts, bool as_hex = false) { auto const value = [as_hex, &opts, begin, end]() -> std::optional<T> { // Check for user-specified true/false values auto const field_len = static_cast<size_t>(end - begin); if (serialized_trie_contains(opts.trie_true, {begin, field_len})) { return 1; } if (serialized_trie_contains(opts.trie_false, {begin, field_len})) { return 0; } return as_hex ? cudf::io::parse_numeric<T, 16>(begin, end, opts) : cudf::io::parse_numeric<T>(begin, end, opts); }(); if (value.has_value()) { static_cast<T*>(out_buffer)[row] = *value; } return value.has_value(); } /** * @brief Dispatch for fixed point types. * * @return bool Whether the parsed value is valid. */ template <typename T, CUDF_ENABLE_IF(cudf::is_fixed_point<T>())> __host__ __device__ __forceinline__ bool operator()(char const* begin, char const* end, void* out_buffer, size_t row, data_type const output_type, parse_options_view const& opts, bool as_hex) { // TODO decide what's invalid input and update parsing functions static_cast<device_storage_type_t<T>*>(out_buffer)[row] = [&opts, output_type, begin, end]() -> device_storage_type_t<T> { return strings::detail::parse_decimal<device_storage_type_t<T>>( begin, end, output_type.scale()); }(); return true; } /** * @brief Dispatch for boolean type types. 
*/ template <typename T, CUDF_ENABLE_IF(std::is_same_v<T, bool>)> __host__ __device__ __forceinline__ bool operator()(char const* begin, char const* end, void* out_buffer, size_t row, data_type const output_type, parse_options_view const& opts, bool as_hex) { auto const value = [&opts, begin, end]() -> std::optional<T> { // Check for user-specified true/false values auto const field_len = static_cast<size_t>(end - begin); if (serialized_trie_contains(opts.trie_true, {begin, field_len})) { return static_cast<T>(true); } if (serialized_trie_contains(opts.trie_false, {begin, field_len})) { return static_cast<T>(false); } return cudf::io::parse_numeric<T>(begin, end, opts); }(); if (value.has_value()) { static_cast<T*>(out_buffer)[row] = *value; } return value.has_value(); } /** * @brief Dispatch for floating-point types, which are set to NaN if the input * is not valid. In such a case, the validity mask is set to zero too. */ template <typename T, CUDF_ENABLE_IF(std::is_floating_point_v<T>)> __host__ __device__ __forceinline__ bool operator()(char const* begin, char const* end, void* out_buffer, size_t row, data_type const output_type, parse_options_view const& opts, bool as_hex) { auto const value = [&opts, begin, end]() -> std::optional<T> { // Check for user-specified true/false values auto const field_len = static_cast<size_t>(end - begin); if (serialized_trie_contains(opts.trie_true, {begin, field_len})) { return static_cast<T>(true); } if (serialized_trie_contains(opts.trie_false, {begin, field_len})) { return static_cast<T>(false); } return cudf::io::parse_numeric<T>(begin, end, opts); }(); if (value.has_value()) { static_cast<T*>(out_buffer)[row] = *value; } return value.has_value() and !std::isnan(*value); } /** * @brief Dispatch for remaining supported types, i.e., timestamp and duration types. */ template <typename T, CUDF_ENABLE_IF(!std::is_integral_v<T> and !std::is_floating_point_v<T> and !cudf::is_fixed_point<T>())> __host__ __device__ __forceinline__ bool operator()(char const* begin, char const* end, void* out_buffer, size_t row, data_type const output_type, parse_options_view const& opts, bool as_hex) { // TODO decide what's invalid input and update parsing functions if constexpr (cudf::is_timestamp<T>()) { static_cast<T*>(out_buffer)[row] = to_timestamp<T>(begin, end, opts.dayfirst); } else if constexpr (cudf::is_duration<T>()) { static_cast<T*>(out_buffer)[row] = to_duration<T>(begin, end); } else { return false; } return true; } }; } // namespace io } // namespace cudf
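// Worked example (illustrative sketch; `begin`/`end` are hypothetical caller-provided pointers):
// for a field containing `  "42"  ` with quotechar '"', the trimming helpers above compose as
//
//   auto [b, e] = trim_whitespaces_quotes(begin, end, '"');   // -> range over `42`
//   // whitespace is trimmed first, then one quotechar is skipped per side
//   // via skip_character().
//
// trim_quotes() strips only a surrounding quote pair (`"42"` -> `42`), and only when the
// range is at least two characters long and both endpoints are the quote character.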
0
rapidsai_public_repos/cudf/cpp/src/io
rapidsai_public_repos/cudf/cpp/src/io/utilities/data_casting.cu
/* * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <io/utilities/parsing_utils.cuh> #include <io/utilities/string_parsing.hpp> #include <cudf/column/column.hpp> #include <cudf/column/column_device_view.cuh> #include <cudf/column/column_factories.hpp> #include <cudf/detail/null_mask.hpp> #include <cudf/detail/nvtx/ranges.hpp> #include <cudf/detail/utilities/cuda.cuh> #include <cudf/detail/utilities/integer_utils.hpp> #include <cudf/strings/detail/strings_children.cuh> #include <cudf/strings/detail/utf8.hpp> #include <cudf/types.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/device_buffer.hpp> #include <rmm/exec_policy.hpp> #include <thrust/copy.h> #include <thrust/functional.h> #include <thrust/transform_reduce.h> #include <cub/cub.cuh> #include <memory> #include <type_traits> namespace cudf::io::json::detail { constexpr auto SINGLE_THREAD_THRESHOLD = 128; constexpr auto WARP_THRESHOLD = 128 * 128; // 16K // Unicode code point escape sequence static constexpr char UNICODE_SEQ = 0x7F; // Invalid escape sequence static constexpr char NON_ESCAPE_CHAR = 0x7E; // Unicode code point escape sequence prefix comprises '\' and 'u' characters static constexpr size_type UNICODE_ESC_PREFIX = 2; // Unicode code point escape sequence comprises four hex characters static constexpr size_type UNICODE_HEX_DIGIT_COUNT = 4; // A unicode code point escape sequence is \uXXXX static auto constexpr NUM_UNICODE_ESC_SEQ_CHARS = UNICODE_ESC_PREFIX + UNICODE_HEX_DIGIT_COUNT; static constexpr auto UTF16_HIGH_SURROGATE_BEGIN = 0xD800; static constexpr auto UTF16_HIGH_SURROGATE_END = 0xDC00; static constexpr auto UTF16_LOW_SURROGATE_BEGIN = 0xDC00; static constexpr auto UTF16_LOW_SURROGATE_END = 0xE000; /** * @brief Describes whether data casting of a certain item succeeded, the item was parsed to null, * or type casting failed. */ enum class data_casting_result { PARSING_SUCCESS, PARSED_TO_NULL, PARSING_FAILURE }; /** * @brief Provides additional information about the type casting result. */ struct data_casting_result_info { // Number of bytes written to output size_type bytes; // Whether parsing succeeded, item was parsed to null, or failed data_casting_result result; }; /** * @brief Returns the character to output for a given escaped character that's following a * backslash. * * @param escaped_char The character following the backslash. * @return The character to output for a given character that's following a backslash */ __device__ __forceinline__ char get_escape_char(char escaped_char) { switch (escaped_char) { case '"': return '"'; case '\\': return '\\'; case '/': return '/'; case 'b': return '\b'; case 'f': return '\f'; case 'n': return '\n'; case 'r': return '\r'; case 't': return '\t'; case 'u': return UNICODE_SEQ; default: return NON_ESCAPE_CHAR; } } /** * @brief Parses the hex value from the four hex digits of a unicode code point escape sequence * \uXXXX.
* * @param str Pointer to the first (most-significant) hex digit * @return The parsed hex value if successful, -1 otherwise. */ __device__ __forceinline__ int32_t parse_unicode_hex(char const* str) { // Prepare result int32_t result = 0, base = 1; constexpr int32_t hex_radix = 16; // Iterate over hex digits right-to-left size_type index = UNICODE_HEX_DIGIT_COUNT; while (index-- > 0) { char const ch = str[index]; if (ch >= '0' && ch <= '9') { result += static_cast<int32_t>(ch - '0') * base; base *= hex_radix; } else if (ch >= 'A' && ch <= 'F') { result += static_cast<int32_t>((ch - 'A') + 10) * base; base *= hex_radix; } else if (ch >= 'a' && ch <= 'f') { result += static_cast<int32_t>((ch - 'a') + 10) * base; base *= hex_radix; } else { return -1; } } return result; } /** * @brief Writes the UTF-8 byte sequence to \p out_it and returns the number of bytes written to * \p out_it */ constexpr size_type write_utf8_char(char_utf8 character, char*& out_it) { auto const bytes = (out_it == nullptr) ? strings::detail::bytes_in_char_utf8(character) : strings::detail::from_char_utf8(character, out_it); if (out_it) out_it += bytes; return bytes; } /** * @brief Processes a string, replaces escape sequences and optionally strips off the quote * characters. * * @tparam in_iterator_t A bidirectional input iterator type whose value_type is convertible to * char * @param in_begin Iterator to the first item to process * @param in_end Iterator to one past the last item to process * @param d_buffer Output character buffer to the first item to write * @param options Settings for controlling string processing behavior * @return A struct of (num_bytes_written, parsing_success_result), where num_bytes_written is * the number of bytes written to d_buffer, parsing_success_result is an enum value indicating * whether parsing succeeded, the item was parsed to null, or failed.
*/ template <typename in_iterator_t> __device__ __forceinline__ data_casting_result_info process_string(in_iterator_t in_begin, in_iterator_t in_end, char* d_buffer, cudf::io::parse_options_view const& options) { int32_t bytes = 0; auto const num_in_chars = thrust::distance(in_begin, in_end); // String values are indicated by keeping the quote character bool const is_string_value = num_in_chars >= 2LL && (options.quotechar == '\0' || ((*in_begin == options.quotechar) && (*thrust::prev(in_end) == options.quotechar))); // Copy literal/numeric value if (not is_string_value) { bytes += (in_end - in_begin); if (d_buffer) d_buffer = thrust::copy(thrust::seq, in_begin, in_end, d_buffer); return {bytes, data_casting_result::PARSING_SUCCESS}; } char constexpr backslash_char = '\\'; // Escape-flag, set after encountering a backslash character bool is_prev_char_escape = false; // Exclude beginning and ending quote chars from string range if (!options.keepquotes) { ++in_begin; --in_end; } // Iterate over the input while (in_begin != in_end) { // Copy single character to output if (!is_prev_char_escape) { is_prev_char_escape = (*in_begin == backslash_char); if (!is_prev_char_escape) { if (d_buffer) *d_buffer++ = *in_begin; ++bytes; } ++in_begin; continue; } // Previous char indicated beginning of escape sequence // Reset escape flag for next loop iteration is_prev_char_escape = false; // Check the character that is supposed to be escaped auto escaped_char = get_escape_char(*in_begin); // We escaped an invalid escape character -> "fail"/null for this item if (escaped_char == NON_ESCAPE_CHAR) { return {bytes, data_casting_result::PARSING_FAILURE}; } // Regular, single-character escape if (escaped_char != UNICODE_SEQ) { if (d_buffer) *d_buffer++ = escaped_char; ++bytes; ++in_begin; continue; } // This is an escape sequence of a unicode code point: \uXXXX, // where each X in XXXX represents a hex digit // Skip over the 'u' char from \uXXXX to the first hex digit ++in_begin; // Make sure that there are at least 4 characters left from the // input, which are expected to be hex digits if (thrust::distance(in_begin, in_end) < UNICODE_HEX_DIGIT_COUNT) { return {bytes, data_casting_result::PARSING_FAILURE}; } auto hex_val = parse_unicode_hex(in_begin); // Couldn't parse hex values from the four-character sequence -> "fail"/null for this item if (hex_val < 0) { return {bytes, data_casting_result::PARSING_FAILURE}; } // Skip over the four hex digits thrust::advance(in_begin, UNICODE_HEX_DIGIT_COUNT); // This may be a UTF-16 encoded surrogate pair: // we expect another \uXXXX sequence int32_t hex_low_val = 0; if (hex_val >= UTF16_HIGH_SURROGATE_BEGIN && hex_val < UTF16_HIGH_SURROGATE_END && thrust::distance(in_begin, in_end) >= NUM_UNICODE_ESC_SEQ_CHARS && *in_begin == backslash_char && *thrust::next(in_begin) == 'u') { // Try to parse hex value following the '\' and 'u' characters from what may be a UTF16 low // surrogate hex_low_val = parse_unicode_hex(thrust::next(in_begin, 2)); } // This is indeed a UTF16 surrogate pair if (hex_val >= UTF16_HIGH_SURROGATE_BEGIN && hex_val < UTF16_HIGH_SURROGATE_END && hex_low_val >= UTF16_LOW_SURROGATE_BEGIN && hex_low_val < UTF16_LOW_SURROGATE_END) { // Skip over the second \uXXXX sequence thrust::advance(in_begin, NUM_UNICODE_ESC_SEQ_CHARS); // Compute UTF16-encoded code point uint32_t unicode_code_point = 0x10000 + ((hex_val - UTF16_HIGH_SURROGATE_BEGIN) << 10) + (hex_low_val - UTF16_LOW_SURROGATE_BEGIN); auto utf8_chars =
strings::detail::codepoint_to_utf8(unicode_code_point); bytes += write_utf8_char(utf8_chars, d_buffer); } else { // Just a single \uXXXX sequence auto utf8_chars = strings::detail::codepoint_to_utf8(hex_val); bytes += write_utf8_char(utf8_chars, d_buffer); } } // The last character of the input is a backslash -> "fail"/null for this item if (is_prev_char_escape) { return {bytes, data_casting_result::PARSING_FAILURE}; } return {bytes, data_casting_result::PARSING_SUCCESS}; } /** * @brief Data structure to hold 1 bit per thread with previous `UNICODE_LOOK_BACK` bits stored in a * warp. * * @tparam num_warps number of warps in the block */ template <unsigned num_warps> struct bitfield_warp { static constexpr auto UNICODE_LOOK_BACK{5}; // 5 because skipping unicode hex chars requires looking back up to 5 chars. // 5+32 for each warp. bool is_slash[num_warps][UNICODE_LOOK_BACK + cudf::detail::warp_size]; /// Sets all bits to 0 __device__ void reset(unsigned warp_id) { if (threadIdx.x % cudf::detail::warp_size < UNICODE_LOOK_BACK) { is_slash[warp_id][threadIdx.x % cudf::detail::warp_size] = 0; } is_slash[warp_id][threadIdx.x % cudf::detail::warp_size + UNICODE_LOOK_BACK] = 0; } /// Shifts UNICODE_LOOK_BACK bits to the left to hold the previous UNICODE_LOOK_BACK bits __device__ void shift(unsigned warp_id) { if (threadIdx.x % cudf::detail::warp_size < UNICODE_LOOK_BACK) is_slash[warp_id][threadIdx.x % cudf::detail::warp_size] = is_slash[warp_id][cudf::detail::warp_size + threadIdx.x % cudf::detail::warp_size]; __syncwarp(); } /// Each thread in a warp sets its own bit. __device__ void set_bits(unsigned warp_id, bool is_escaping_backslash) { is_slash[warp_id][UNICODE_LOOK_BACK + threadIdx.x % cudf::detail::warp_size] = is_escaping_backslash; __syncwarp(); } /// Each thread in a warp gets the requested bit. __device__ bool get_bit(unsigned warp_id, int bit_index) { return is_slash[warp_id][UNICODE_LOOK_BACK + bit_index]; } }; /** * @brief Data structure to hold 1 bit per thread with previous `UNICODE_LOOK_BACK` bits stored in a * block. * * @tparam num_warps number of warps in the block */ template <unsigned num_warps> struct bitfield_block { static constexpr auto UNICODE_LOOK_BACK{5}; // 5 because skipping unicode hex chars requires looking back up to 5 chars. // 5 + num_warps*32 for entire block bool is_slash[UNICODE_LOOK_BACK + num_warps * cudf::detail::warp_size]; /// Sets all bits to 0 __device__ void reset(unsigned warp_id) { if (threadIdx.x < UNICODE_LOOK_BACK) { is_slash[threadIdx.x] = 0; } is_slash[threadIdx.x + UNICODE_LOOK_BACK] = 0; } /// Shifts UNICODE_LOOK_BACK bits to the left to hold the previous UNICODE_LOOK_BACK bits __device__ void shift(unsigned warp_id) { if (threadIdx.x < UNICODE_LOOK_BACK) is_slash[threadIdx.x] = is_slash[num_warps * cudf::detail::warp_size + threadIdx.x]; __syncthreads(); } /// Each thread in a block sets its own bit. __device__ void set_bits(unsigned warp_id, bool is_escaping_backslash) { is_slash[UNICODE_LOOK_BACK + threadIdx.x] = is_escaping_backslash; __syncthreads(); } /// Each thread in a block gets the requested bit. __device__ bool get_bit(unsigned warp_id, int bit_index) { return is_slash[UNICODE_LOOK_BACK + bit_index]; } }; // Algorithm: warp/block parallel version of string_parse and process_string() // Decoding character classes (u8, u16, \*, *): // character count: input->output // \uXXXX 6->2/3/4 // \uXXXX\uXXXX 12->2/3/4 // \" 2->1 // * 1->1 // // ERROR conditions.
(all collaborating threads quit) // c=='\' & curr_idx == end_idx-1; // [c-1]=='\' & get_escape[c]==NEC // [c-1]=='\' & [c]=='u' & end_idx-curr_idx < UNICODE_HEX_DIGIT_COUNT // [c-1]=='\' & [c]=='u' & end_idx-curr_idx >= UNICODE_HEX_DIGIT_COUNT && non-hex // // skip conditions. (current thread skips this char, no output) // c=='\' skip. (Escaping char only) // [c-2]=='\' && [c-1]=='u' for [2,1], [3,2] [4,5], [5, 6], skip. // // write conditions. (write to d_buffer) // [c-1]!='\' & [c]!='\' write [c] // [c-1]!='\' & [c]=='\' skip (already covered in skip conditions) // [c-1]=='\' & [c]!=NEC && [c]!=UNICODE_SEQ, write [c] // [c-1]=='\' & [c]=='u' & end_idx-curr_idx >= UNICODE_HEX_DIGIT_COUNT && hex, DECODE // [c+1:4]=curr_hex_val // // if [c+5]=='\' & [c+6]=='u' & end_idx-curr_idx >= UNICODE_HEX_DIGIT_COUNT && // hex,DECODE [c+7:4]=next_hex_val // // if [c-7]=='\' & [c-6]=='u' & end_idx-curr_idx >= UNICODE_HEX_DIGIT_COUNT && // hex,DECODE [c-5:4]=prev_hex_val prev_hex_val, curr_hex_val, next_hex_val // // if prev_hex_val in high, curr_hex_val in low, skip. // // if curr_hex_val in high, next_hex_val in low, write [u16] // if curr_hex_val not in high, write [u8] // before writing, find num of output characters per threads, // then do intra-warp/intra-block scan for out_idx // propagate offset from next iteration to carry forward. // Uses 1 warp per string or 1 block per string /** * @brief Warp/Block parallel version of string_parse functor * * @tparam is_warp True if 1 warp per string, False if 1 block per string * @tparam num_warps Number of warps per block * @tparam str_tuple_it Iterator type for tuple with string pointer and its length * @param str_tuples iterator of tuple with string pointer and its length * @param total_out_strings Number of string rows to be processed * @param str_counter Counter to keep track of processed number of strings * @param null_mask Null mask * @param null_count_data pointer to store null count * @param options Settings for controlling string processing behavior * @param d_offsets Offsets to identify where to store the results for each string * @param d_chars Character array to store the characters of strings */ template <bool is_warp, size_type num_warps, typename str_tuple_it> __global__ void parse_fn_string_parallel(str_tuple_it str_tuples, size_type total_out_strings, size_type* str_counter, bitmask_type* null_mask, size_type* null_count_data, cudf::io::parse_options_view const options, size_type* d_offsets, char* d_chars) { constexpr auto BLOCK_SIZE = is_warp ? cudf::detail::warp_size : cudf::detail::warp_size * num_warps; size_type lane = is_warp ? (threadIdx.x % BLOCK_SIZE) : threadIdx.x; // get 1-string index per warp/block auto get_next_string = [&]() { if constexpr (is_warp) { size_type istring; if (lane == 0) { istring = atomicAdd(str_counter, 1); } return __shfl_sync(0xffffffff, istring, 0); } else { // Ensure lane 0 doesn't update istring before all threads have read the previous iteration's // istring value __syncthreads(); __shared__ size_type istring; if (lane == 0) { istring = atomicAdd(str_counter, 1); } __syncthreads(); return istring; } }; // grid-stride loop. 
for (size_type istring = get_next_string(); istring < total_out_strings; istring = get_next_string()) { // skip nulls if (null_mask != nullptr && not bit_is_set(null_mask, istring)) { if (!d_chars && lane == 0) d_offsets[istring] = 0; continue; // grid-stride return; } auto in_begin = str_tuples[istring].first; auto in_end = in_begin + str_tuples[istring].second; auto const num_in_chars = str_tuples[istring].second; if constexpr (is_warp) { if (num_in_chars <= SINGLE_THREAD_THRESHOLD or num_in_chars > WARP_THRESHOLD) continue; } else { if (num_in_chars <= WARP_THRESHOLD) continue; } // Check if the value corresponds to the null literal if (!d_chars) { auto const is_null_literal = serialized_trie_contains( options.trie_na, {in_begin, static_cast<std::size_t>(num_in_chars)}); if (is_null_literal && null_mask != nullptr) { if (lane == 0) { clear_bit(null_mask, istring); atomicAdd(null_count_data, 1); if (!d_chars) d_offsets[istring] = 0; } continue; // grid-stride return; } } // String values are indicated by keeping the quote character bool const is_string_value = num_in_chars >= 2LL && (options.quotechar == '\0' || ((*in_begin == options.quotechar) && (*thrust::prev(in_end) == options.quotechar))); char* d_buffer = d_chars ? d_chars + d_offsets[istring] : nullptr; // Copy literal/numeric value if (not is_string_value) { if (!d_chars) { if (lane == 0) { d_offsets[istring] = in_end - in_begin; } } else { for (thread_index_type char_index = lane; char_index < (in_end - in_begin); char_index += BLOCK_SIZE) { d_buffer[char_index] = in_begin[char_index]; } } continue; // grid-stride return; } // Exclude beginning and ending quote chars from string range if (!options.keepquotes) { ++in_begin; --in_end; } // warp-parallelized or block-parallelized process_string() auto is_hex = [](auto ch) { return (ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f'); }; // for backslash scan calculation: is_previous_escaping_backslash [[maybe_unused]] auto warp_id = threadIdx.x / cudf::detail::warp_size; bool init_state_reg; __shared__ bool init_state_shared; size_type last_offset_reg; __shared__ size_type last_offset_shared; bool& init_state(is_warp ? init_state_reg : init_state_shared); size_type& last_offset(is_warp ? last_offset_reg : last_offset_shared); if (is_warp || lane == 0) { init_state = false; last_offset = 0; } using bitfield = std::conditional_t<is_warp, bitfield_warp<num_warps>, bitfield_block<num_warps>>; __shared__ bitfield is_slash; is_slash.reset(warp_id); __syncthreads(); // 0-31, 32-63, ... i*32-n. // entire warp executes but with mask. for (thread_index_type char_index = lane; char_index < cudf::util::round_up_safe(in_end - in_begin, static_cast<long>(BLOCK_SIZE)); char_index += BLOCK_SIZE) { bool const is_within_bounds = char_index < (in_end - in_begin); auto const c = is_within_bounds ? in_begin[char_index] : '\0'; auto const prev_c = (char_index > 0 and is_within_bounds) ? in_begin[char_index - 1] : '\0'; auto const escaped_char = get_escape_char(c); bool is_escaping_backslash{false}; [[maybe_unused]] bool is_prev_escaping_backslash{false}; // Check whether the current char is an escaping backslash based on whether the previous one is. // curr = !prev & c=='\\' // So, a scan is required from the beginning of the string. // State table approach (intra-warp FST) (intra-block FST) // 2 states: Not-Slash(NS), Slash(S). // prev / * // NS S NS // S NS NS // After inclusive scan, all current S states translate to escaping backslashes. // All escaping backslashes should be skipped.
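// Worked example (illustrative) of the escaping-backslash scan implemented below:
// for the raw bytes `a`, `\`, `\`, `n` (the escaped sequence a\\n),
//   curr (c == '\\') : 0 1 1 0
//   inclusive scan   : 0 1 0 0   -> is_escaping_backslash
// Only the first backslash escapes; the second is the escaped character, so the
// pair decodes to a single '\' and 'n' is copied through unchanged.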
struct state_table { // using bit fields instead of state[2] bool state0 : 1; bool state1 : 1; bool inline __device__ get(bool init_state) const { return init_state ? state1 : state0; } }; state_table curr{is_within_bounds && c == '\\', false}; // state transition vector. auto composite_op = [](state_table op1, state_table op2) { // equivalent of state_table{op2.state[op1.state[0]], op2.state[op1.state[1]]}; return state_table{op1.state0 ? op2.state1 : op2.state0, op1.state1 ? op2.state1 : op2.state0}; }; state_table scanned; // inclusive scan of escaping backslashes if constexpr (is_warp) { using SlashScan = cub::WarpScan<state_table>; __shared__ typename SlashScan::TempStorage temp_slash[num_warps]; SlashScan(temp_slash[warp_id]).InclusiveScan(curr, scanned, composite_op); is_escaping_backslash = scanned.get(init_state); init_state = __shfl_sync(~0u, is_escaping_backslash, BLOCK_SIZE - 1); __syncwarp(); is_slash.shift(warp_id); is_slash.set_bits(warp_id, is_escaping_backslash); is_prev_escaping_backslash = is_slash.get_bit(warp_id, lane - 1); } else { using SlashScan = cub::BlockScan<state_table, BLOCK_SIZE>; __shared__ typename SlashScan::TempStorage temp_slash; SlashScan(temp_slash).InclusiveScan(curr, scanned, composite_op); is_escaping_backslash = scanned.get(init_state); __syncthreads(); if (threadIdx.x == BLOCK_SIZE - 1) init_state = is_escaping_backslash; __syncthreads(); is_slash.shift(warp_id); is_slash.set_bits(warp_id, is_escaping_backslash); is_prev_escaping_backslash = is_slash.get_bit(warp_id, lane - 1); // There is another __syncthreads() at the end of for-loop. } // Strings with parsing errors are marked as null bool error = false; if (is_within_bounds) { // curr=='\' and end, or prev=='\' and curr=='u' and end-curr < UNICODE_HEX_DIGIT_COUNT // or prev=='\' and curr=='u' and end-curr >= UNICODE_HEX_DIGIT_COUNT and any non-hex error |= (is_escaping_backslash /*c == '\\'*/ && char_index == (in_end - in_begin) - 1); error |= (is_prev_escaping_backslash && escaped_char == NON_ESCAPE_CHAR); error |= (is_prev_escaping_backslash && c == 'u' && ((in_begin + char_index + UNICODE_HEX_DIGIT_COUNT >= in_end) | !is_hex(in_begin[char_index + 1]) | !is_hex(in_begin[char_index + 2]) | !is_hex(in_begin[char_index + 3]) | !is_hex(in_begin[char_index + 4]))); } // Make sure all threads have no errors before continuing if constexpr (is_warp) { error = __any_sync(~0u, error); } else { using ErrorReduce = cub::BlockReduce<bool, BLOCK_SIZE>; __shared__ typename ErrorReduce::TempStorage temp_storage_error; __shared__ bool error_reduced; error_reduced = ErrorReduce(temp_storage_error).Sum(error); // TODO use cub::LogicalOR. // only valid in thread0, so shared memory is used for broadcast.
__syncthreads(); error = error_reduced; } // If any thread has an error, skip the rest of the string and mark this string as null if (error) { if (!d_chars && lane == 0) { if (null_mask != nullptr) { clear_bit(null_mask, istring); atomicAdd(null_count_data, 1); } last_offset = 0; d_offsets[istring] = 0; } if constexpr (!is_warp) { __syncthreads(); } break; // grid-stride return; } // Skipping non-copied escaped characters bool skip = !is_within_bounds; // false; // skip \ for \" \\ \/ \b \f \n \r \t \uXXXX skip |= is_escaping_backslash; if (is_within_bounds) { // skip X for each X in \uXXXX skip |= char_index >= 2 && is_slash.get_bit(warp_id, lane - 2) && in_begin[char_index - 1] == 'u'; skip |= char_index >= 3 && is_slash.get_bit(warp_id, lane - 3) && in_begin[char_index - 2] == 'u'; skip |= char_index >= 4 && is_slash.get_bit(warp_id, lane - 4) && in_begin[char_index - 3] == 'u'; skip |= char_index >= 5 && is_slash.get_bit(warp_id, lane - 5) && in_begin[char_index - 4] == 'u'; } int this_num_out = 0; cudf::char_utf8 write_char{}; if (!skip) { // 1. Unescaped character if (!is_prev_escaping_backslash) { this_num_out = 1; // writes char directly for non-unicode } else { // 2. Escaped character if (escaped_char != UNICODE_SEQ) { this_num_out = 1; // writes char directly for non-unicode } else { // 3. Unicode // UTF8 \uXXXX auto hex_val = parse_unicode_hex(in_begin + char_index + 1); auto hex_low_val = 0; // UTF16 \uXXXX\uXXXX // Note: no need for scanned_backslash below because we already know that // the '\u' check alone is enough. if (hex_val >= UTF16_HIGH_SURROGATE_BEGIN && hex_val < UTF16_HIGH_SURROGATE_END && (in_begin + char_index + UNICODE_HEX_DIGIT_COUNT + NUM_UNICODE_ESC_SEQ_CHARS) < in_end && in_begin[char_index + NUM_UNICODE_ESC_SEQ_CHARS - 1] == '\\' && in_begin[char_index + NUM_UNICODE_ESC_SEQ_CHARS] == 'u') { hex_low_val = parse_unicode_hex(in_begin + char_index + 1 + 6); } if (hex_val >= UTF16_HIGH_SURROGATE_BEGIN && hex_val < UTF16_HIGH_SURROGATE_END && hex_low_val >= UTF16_LOW_SURROGATE_BEGIN && hex_low_val < UTF16_LOW_SURROGATE_END) { // Compute UTF16-encoded code point uint32_t unicode_code_point = 0x10000 + ((hex_val - UTF16_HIGH_SURROGATE_BEGIN) << 10) + (hex_low_val - UTF16_LOW_SURROGATE_BEGIN); write_char = strings::detail::codepoint_to_utf8(unicode_code_point); this_num_out = strings::detail::bytes_in_char_utf8(write_char); } else { // if hex_val is a high surrogate, ideally it should be a parsing failure, // but it is skipped here as other parsers do this too. if (hex_val >= UTF16_LOW_SURROGATE_BEGIN && hex_val < UTF16_LOW_SURROGATE_END) { // Ideally this should be skipped if the previous char is a high surrogate. skip = true; this_num_out = 0; write_char = 0; } else { // if UTF8 write_char = strings::detail::codepoint_to_utf8(hex_val); this_num_out = strings::detail::bytes_in_char_utf8(write_char); } } } } } // !skip end.
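// Worked example (illustrative): if the threads of a warp computed
// this_num_out = [1, 0, 0, 2, 1, ...], the exclusive sum below yields
// offset = [0, 1, 1, 1, 3, ...]; adding last_offset carries the running
// total across iterations of the outer character loop, so every surviving
// character lands at a unique position in d_buffer.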
{ // compute offset to write output for each thread size_type offset; if constexpr (is_warp) { using OffsetScan = cub::WarpScan<size_type>; __shared__ typename OffsetScan::TempStorage temp_storage[num_warps]; OffsetScan(temp_storage[warp_id]).ExclusiveSum(this_num_out, offset); } else { using OffsetScan = cub::BlockScan<size_type, BLOCK_SIZE>; __shared__ typename OffsetScan::TempStorage temp_storage; OffsetScan(temp_storage).ExclusiveSum(this_num_out, offset); __syncthreads(); } offset += last_offset; // Write output if (d_chars && !skip) { auto const is_not_unicode = (!is_prev_escaping_backslash) || escaped_char != UNICODE_SEQ; if (is_not_unicode) { *(d_buffer + offset) = (!is_prev_escaping_backslash) ? c : escaped_char; } else { strings::detail::from_char_utf8(write_char, d_buffer + offset); } } offset += this_num_out; if constexpr (is_warp) { last_offset = __shfl_sync(0xffffffff, offset, BLOCK_SIZE - 1); } else { __syncthreads(); if (threadIdx.x == BLOCK_SIZE - 1) last_offset = offset; __syncthreads(); } } } // char for-loop if (!d_chars && lane == 0) { d_offsets[istring] = last_offset; } } // grid-stride for-loop } template <typename str_tuple_it> struct string_parse { str_tuple_it str_tuples; bitmask_type* null_mask; size_type* null_count_data; cudf::io::parse_options_view const options; size_type* d_offsets{}; char* d_chars{}; __device__ void operator()(size_type idx) { if (null_mask != nullptr && not bit_is_set(null_mask, idx)) { if (!d_chars) d_offsets[idx] = 0; return; } auto const in_begin = str_tuples[idx].first; auto const in_end = in_begin + str_tuples[idx].second; auto const num_in_chars = str_tuples[idx].second; if (num_in_chars > SINGLE_THREAD_THRESHOLD) return; // Check if the value corresponds to the null literal if (!d_chars) { auto const is_null_literal = serialized_trie_contains( options.trie_na, {in_begin, static_cast<std::size_t>(num_in_chars)}); if (is_null_literal && null_mask != nullptr) { clear_bit(null_mask, idx); atomicAdd(null_count_data, 1); if (!d_chars) d_offsets[idx] = 0; return; } } char* d_buffer = d_chars ? 
d_chars + d_offsets[idx] : nullptr; auto str_process_info = process_string(in_begin, in_end, d_buffer, options); if (str_process_info.result != data_casting_result::PARSING_SUCCESS) { if (null_mask != nullptr) { clear_bit(null_mask, idx); atomicAdd(null_count_data, 1); } if (!d_chars) d_offsets[idx] = 0; } else { if (!d_chars) d_offsets[idx] = str_process_info.bytes; } } }; template <typename SymbolT> struct to_string_view_pair { SymbolT const* data; to_string_view_pair(SymbolT const* _data) : data(_data) {} __device__ auto operator()(thrust::tuple<size_type, size_type> ip) { return thrust::pair<char const*, std::size_t>{data + thrust::get<0>(ip), static_cast<std::size_t>(thrust::get<1>(ip))}; } }; template <typename string_view_pair_it> static std::unique_ptr<column> parse_string(string_view_pair_it str_tuples, size_type col_size, rmm::device_buffer&& null_mask, rmm::device_scalar<size_type>& d_null_count, cudf::io::parse_options_view const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { // CUDF_FUNC_RANGE(); auto const max_length = thrust::transform_reduce( rmm::exec_policy(stream), str_tuples, str_tuples + col_size, [] __device__(auto t) { return t.second; }, size_type{0}, thrust::maximum<size_type>{}); auto offsets = cudf::make_numeric_column( data_type{type_to_id<size_type>()}, col_size + 1, cudf::mask_state::UNALLOCATED, stream, mr); auto d_offsets = offsets->mutable_view().data<size_type>(); auto null_count_data = d_null_count.data(); auto single_thread_fn = string_parse<decltype(str_tuples)>{ str_tuples, static_cast<bitmask_type*>(null_mask.data()), null_count_data, options, d_offsets}; thrust::for_each_n(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), col_size, single_thread_fn); constexpr auto warps_per_block = 8; constexpr int threads_per_block = cudf::detail::warp_size * warps_per_block; auto num_blocks = cudf::util::div_rounding_up_safe(col_size, warps_per_block); auto str_counter = cudf::numeric_scalar(size_type{0}, true, stream); // TODO run these independent kernels in parallel streams. 
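// Dispatch summary (illustrative restatement of the thresholds defined at the
// top of this file): strings with length <= SINGLE_THREAD_THRESHOLD (128) are
// handled by string_parse with one thread per string, lengths in
// (SINGLE_THREAD_THRESHOLD, WARP_THRESHOLD] by parse_fn_string_parallel with
// one warp per string, and longer strings with one block per string. Each
// kernel runs twice: first with d_chars == nullptr to compute output sizes
// into d_offsets, then again to write the characters.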
if (max_length > SINGLE_THREAD_THRESHOLD) { parse_fn_string_parallel<true, warps_per_block> <<<num_blocks, threads_per_block, 0, stream.value()>>>( str_tuples, col_size, str_counter.data(), static_cast<bitmask_type*>(null_mask.data()), null_count_data, options, d_offsets, nullptr); } if (max_length > WARP_THRESHOLD) { // for strings longer than WARP_THRESHOLD, 1 block per string str_counter.set_value(0, stream); parse_fn_string_parallel<false, warps_per_block> <<<num_blocks, threads_per_block, 0, stream.value()>>>( str_tuples, col_size, str_counter.data(), static_cast<bitmask_type*>(null_mask.data()), null_count_data, options, d_offsets, nullptr); } auto const bytes = cudf::detail::sizes_to_offsets(d_offsets, d_offsets + col_size + 1, d_offsets, stream); CUDF_EXPECTS(bytes <= std::numeric_limits<size_type>::max(), "Size of output exceeds the column size limit", std::overflow_error); // CHARS column std::unique_ptr<column> chars = strings::detail::create_chars_child_column(static_cast<size_type>(bytes), stream, mr); auto d_chars = chars->mutable_view().data<char>(); single_thread_fn.d_chars = d_chars; thrust::for_each_n(rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), col_size, single_thread_fn); if (max_length > SINGLE_THREAD_THRESHOLD) { str_counter.set_value(0, stream); parse_fn_string_parallel<true, warps_per_block> <<<num_blocks, threads_per_block, 0, stream.value()>>>( str_tuples, col_size, str_counter.data(), static_cast<bitmask_type*>(null_mask.data()), null_count_data, options, d_offsets, d_chars); } if (max_length > WARP_THRESHOLD) { str_counter.set_value(0, stream); // for strings longer than WARP_THRESHOLD, 1 block per string parse_fn_string_parallel<false, warps_per_block> <<<num_blocks, threads_per_block, 0, stream.value()>>>( str_tuples, col_size, str_counter.data(), static_cast<bitmask_type*>(null_mask.data()), null_count_data, options, d_offsets, d_chars); } return make_strings_column(col_size, std::move(offsets), std::move(chars), d_null_count.value(stream), std::move(null_mask)); } std::unique_ptr<column> parse_data( const char* data, thrust::zip_iterator<thrust::tuple<const size_type*, const size_type*>> offset_length_begin, size_type col_size, data_type col_type, rmm::device_buffer&& null_mask, size_type null_count, cudf::io::parse_options_view const& options, rmm::cuda_stream_view stream, rmm::mr::device_memory_resource* mr) { CUDF_FUNC_RANGE(); if (col_size == 0) { return make_empty_column(col_type); } auto d_null_count = rmm::device_scalar<size_type>(null_count, stream); auto null_count_data = d_null_count.data(); if (null_mask.is_empty()) { null_mask = cudf::detail::create_null_mask(col_size, mask_state::ALL_VALID, stream, mr); } // Prepare iterator that returns (string_ptr, string_length)-pairs needed by type conversion auto str_tuples = thrust::make_transform_iterator(offset_length_begin, to_string_view_pair{data}); if (col_type == cudf::data_type{cudf::type_id::STRING}) { return parse_string( str_tuples, col_size, std::move(null_mask), d_null_count, options, stream, mr); } auto out_col = make_fixed_width_column(col_type, col_size, std::move(null_mask), null_count, stream, mr); auto output_dv_ptr = mutable_column_device_view::create(*out_col, stream); // use `ConvertFunctor` to convert non-string values thrust::for_each_n( rmm::exec_policy(stream), thrust::make_counting_iterator<size_type>(0), col_size, [str_tuples, col = *output_dv_ptr, options, col_type, null_count_data] __device__( size_type row) { if (col.is_null(row)) { return; } auto 
const in = str_tuples[row]; auto const is_null_literal = serialized_trie_contains(options.trie_na, {in.first, static_cast<size_t>(in.second)}); if (is_null_literal) { col.set_null(row); atomicAdd(null_count_data, 1); return; } // If this is a string value, remove quotes auto [in_begin, in_end] = trim_quotes(in.first, in.first + in.second, options.quotechar); auto const is_parsed = cudf::type_dispatcher(col_type, ConvertFunctor{}, in_begin, in_end, col.data<char>(), row, col_type, options, false); if (not is_parsed) { col.set_null(row); atomicAdd(null_count_data, 1); } }); out_col->set_null_count(d_null_count.value(stream)); return out_col; } } // namespace cudf::io::json::detail
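// Usage sketch (illustrative; every name below is hypothetical and assumed
// to be set up by the caller):
//
//   auto offset_length_begin = thrust::make_zip_iterator(
//       thrust::make_tuple(d_field_offsets, d_field_lengths));
//   auto col = cudf::io::json::detail::parse_data(d_json_text,
//                                                 offset_length_begin,
//                                                 num_rows,
//                                                 cudf::data_type{cudf::type_id::INT32},
//                                                 std::move(null_mask),
//                                                 null_count,
//                                                 options,
//                                                 stream,
//                                                 mr);
//
// STRING columns take the parse_string() path above; all other types are
// converted element-wise by ConvertFunctor via cudf::type_dispatcher.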
0