---- hf_public_repos/candle/candle-transformers/src/models/clip/text_model.rs ----
//! Contrastive Language-Image Pre-Training
//!
//! Contrastive Language-Image Pre-Training (CLIP) is an architecture trained on
//! pairs of images with related texts.
//!
//! - [GH](https://github.com/openai/CLIP)
//! - [Code](https://github.com/huggingface/transformers/tree/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip)
use candle::{DType, Device, IndexOp, Result, Tensor, D};
use candle_nn as nn;
use candle_nn::Module;
use super::EncoderConfig;
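// QuickGELU is the activation used by the original CLIP checkpoints:
// quick_gelu(x) = x * sigmoid(1.702 * x), a cheap approximation of GELU.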
#[derive(Debug, Clone, Copy)]
pub enum Activation {
QuickGelu,
}
impl Module for Activation {
fn forward(&self, xs: &Tensor) -> Result<Tensor> {
match self {
Activation::QuickGelu => xs * nn::ops::sigmoid(&(xs * 1.702f64)?)?,
}
}
}
#[derive(Debug, Clone)]
pub struct ClipTextConfig {
pub vocab_size: usize,
pub embed_dim: usize,
pub activation: Activation,
pub intermediate_size: usize,
pub max_position_embeddings: usize,
pub pad_with: Option<String>,
pub num_hidden_layers: usize,
pub num_attention_heads: usize,
#[allow(dead_code)]
pub projection_dim: usize,
}
impl ClipTextConfig {
// The config details can be found in the "text_config" section of this json file:
// https://huggingface.co/openai/clip-vit-base-patch32/blob/main/config.json
pub fn vit_base_patch32() -> Self {
Self {
vocab_size: 49408,
embed_dim: 512,
intermediate_size: 2048,
max_position_embeddings: 77,
pad_with: None,
num_hidden_layers: 12,
num_attention_heads: 8,
projection_dim: 512,
activation: Activation::QuickGelu,
}
}
}
// ClipTextEmbeddings is mostly based on the existing implementation in the stable diffusion model.
// TODO rewrite to be more similar to https://github.com/huggingface/transformers/blob/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip/modeling_clip.py#L142
#[derive(Clone, Debug)]
struct ClipTextEmbeddings {
token_embedding: candle_nn::Embedding,
position_embedding: candle_nn::Embedding,
position_ids: Tensor,
}
impl ClipTextEmbeddings {
fn new(vs: candle_nn::VarBuilder, c: &ClipTextConfig) -> Result<Self> {
let token_embedding =
candle_nn::embedding(c.vocab_size, c.embed_dim, vs.pp("token_embedding"))?;
let position_embedding: nn::Embedding = candle_nn::embedding(
c.max_position_embeddings,
c.embed_dim,
vs.pp("position_embedding"),
)?;
let position_ids =
Tensor::arange(0u32, c.max_position_embeddings as u32, vs.device())?.unsqueeze(0)?;
Ok(Self {
token_embedding,
position_embedding,
position_ids,
})
}
}
impl Module for ClipTextEmbeddings {
fn forward(&self, input_ids: &Tensor) -> Result<Tensor> {
let seq_length = input_ids.dim(D::Minus1)?;
let inputs_embeds = self.token_embedding.forward(input_ids)?;
let position_ids = self.position_ids.narrow(1, 0, seq_length)?;
let position_embedding = self.position_embedding.forward(&position_ids)?;
inputs_embeds.broadcast_add(&position_embedding)
}
}
#[derive(Clone, Debug)]
struct ClipAttention {
k_proj: candle_nn::Linear,
v_proj: candle_nn::Linear,
q_proj: candle_nn::Linear,
out_proj: candle_nn::Linear,
head_dim: usize,
scale: f64,
num_attention_heads: usize,
}
impl ClipAttention {
fn new(vs: candle_nn::VarBuilder, c: &EncoderConfig) -> Result<Self> {
let embed_dim = c.embed_dim();
let num_attention_heads = c.num_attention_heads();
let k_proj = candle_nn::linear(embed_dim, embed_dim, vs.pp("k_proj"))?;
let v_proj = candle_nn::linear(embed_dim, embed_dim, vs.pp("v_proj"))?;
let q_proj = candle_nn::linear(embed_dim, embed_dim, vs.pp("q_proj"))?;
let out_proj = candle_nn::linear(embed_dim, embed_dim, vs.pp("out_proj"))?;
let head_dim = embed_dim / num_attention_heads;
let scale = (head_dim as f64).powf(-0.5);
Ok(ClipAttention {
k_proj,
v_proj,
q_proj,
out_proj,
head_dim,
scale,
num_attention_heads,
})
}
fn shape(&self, xs: &Tensor, seq_len: usize, bsz: usize) -> Result<Tensor> {
xs.reshape((bsz, seq_len, self.num_attention_heads, self.head_dim))?
.transpose(1, 2)?
.contiguous()
}
fn forward(&self, xs: &Tensor, causal_attention_mask: Option<&Tensor>) -> Result<Tensor> {
let in_dtype = xs.dtype();
let (bsz, seq_len, embed_dim) = xs.dims3()?;
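// The attention weights are computed in f32 for numerical stability and cast
// back to the input dtype after the value matmul.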
let query_states = (self.q_proj.forward(xs)? * self.scale)?;
let proj_shape = (bsz * self.num_attention_heads, seq_len, self.head_dim);
let query_states = self
.shape(&query_states, seq_len, bsz)?
.reshape(proj_shape)?
.to_dtype(DType::F32)?;
let key_states = self
.shape(&self.k_proj.forward(xs)?, seq_len, bsz)?
.reshape(proj_shape)?
.to_dtype(DType::F32)?;
let value_states = self
.shape(&self.v_proj.forward(xs)?, seq_len, bsz)?
.reshape(proj_shape)?
.to_dtype(DType::F32)?;
let attn_weights = query_states.matmul(&key_states.transpose(1, 2)?)?;
let src_len = key_states.dim(1)?;
let attn_weights = if let Some(causal_attention_mask) = causal_attention_mask {
attn_weights
.reshape((bsz, self.num_attention_heads, seq_len, src_len))?
.broadcast_add(causal_attention_mask)?
.reshape((bsz * self.num_attention_heads, seq_len, src_len))?
} else {
attn_weights
};
let attn_weights = candle_nn::ops::softmax(&attn_weights, D::Minus1)?;
let attn_output = attn_weights.matmul(&value_states)?.to_dtype(in_dtype)?;
let attn_output = attn_output
.reshape((bsz, self.num_attention_heads, seq_len, self.head_dim))?
.transpose(1, 2)?
.reshape((bsz, seq_len, embed_dim))?;
self.out_proj.forward(&attn_output)
}
}
#[derive(Clone, Debug)]
struct ClipMlp {
fc1: candle_nn::Linear,
fc2: candle_nn::Linear,
activation: Activation,
}
impl ClipMlp {
fn new(vs: candle_nn::VarBuilder, c: &EncoderConfig) -> Result<Self> {
let fc1 = candle_nn::linear(c.embed_dim(), c.intermediate_size(), vs.pp("fc1"))?;
let fc2 = candle_nn::linear(c.intermediate_size(), c.embed_dim(), vs.pp("fc2"))?;
Ok(ClipMlp {
fc1,
fc2,
activation: c.activation(),
})
}
}
impl ClipMlp {
fn forward(&self, xs: &Tensor) -> Result<Tensor> {
let xs = self.fc1.forward(xs)?;
self.fc2.forward(&self.activation.forward(&xs)?)
}
}
#[derive(Clone, Debug)]
struct ClipEncoderLayer {
self_attn: ClipAttention,
layer_norm1: candle_nn::LayerNorm,
mlp: ClipMlp,
layer_norm2: candle_nn::LayerNorm,
}
impl ClipEncoderLayer {
fn new(vs: candle_nn::VarBuilder, c: &EncoderConfig) -> Result<Self> {
let self_attn = ClipAttention::new(vs.pp("self_attn"), c)?;
let layer_norm1 = candle_nn::layer_norm(c.embed_dim(), 1e-5, vs.pp("layer_norm1"))?;
let mlp = ClipMlp::new(vs.pp("mlp"), c)?;
let layer_norm2 = candle_nn::layer_norm(c.embed_dim(), 1e-5, vs.pp("layer_norm2"))?;
Ok(ClipEncoderLayer {
self_attn,
layer_norm1,
mlp,
layer_norm2,
})
}
fn forward(&self, xs: &Tensor, causal_attention_mask: Option<&Tensor>) -> Result<Tensor> {
let residual = xs;
let xs = self.layer_norm1.forward(xs)?;
let xs = self.self_attn.forward(&xs, causal_attention_mask)?;
let xs = (xs + residual)?;
let residual = &xs;
let xs = self.layer_norm2.forward(&xs)?;
let xs = self.mlp.forward(&xs)?;
xs + residual
}
}
#[derive(Clone, Debug)]
pub struct ClipEncoder {
layers: Vec<ClipEncoderLayer>,
}
impl ClipEncoder {
pub fn new(vs: candle_nn::VarBuilder, c: &EncoderConfig) -> Result<Self> {
let vs = vs.pp("layers");
let mut layers: Vec<ClipEncoderLayer> = Vec::new();
for index in 0..c.num_hidden_layers() {
let layer = ClipEncoderLayer::new(vs.pp(index.to_string()), c)?;
layers.push(layer)
}
Ok(ClipEncoder { layers })
}
pub fn forward(&self, xs: &Tensor, causal_attention_mask: Option<&Tensor>) -> Result<Tensor> {
let mut xs = xs.clone();
for layer in self.layers.iter() {
xs = layer.forward(&xs, causal_attention_mask)?;
}
Ok(xs)
}
// required by LLaVA
pub fn output_hidden_states(
&self,
xs: &Tensor,
causal_attention_mask: Option<&Tensor>,
) -> Result<Vec<Tensor>> {
let mut xs = xs.clone();
let mut hidden_states = Vec::new();
for layer in self.layers.iter() {
xs = layer.forward(&xs, causal_attention_mask)?;
hidden_states.push(xs.clone());
}
Ok(hidden_states)
}
}
/// A CLIP transformer based model.
#[derive(Clone, Debug)]
pub struct ClipTextTransformer {
embeddings: ClipTextEmbeddings,
encoder: ClipEncoder,
final_layer_norm: candle_nn::LayerNorm,
}
impl ClipTextTransformer {
pub fn new(vs: candle_nn::VarBuilder, c: &ClipTextConfig) -> Result<Self> {
let embeddings = ClipTextEmbeddings::new(vs.pp("embeddings"), c)?;
let encoder = ClipEncoder::new(vs.pp("encoder"), &EncoderConfig::Text(c.clone()))?;
let final_layer_norm = candle_nn::layer_norm(c.embed_dim, 1e-5, vs.pp("final_layer_norm"))?;
Ok(ClipTextTransformer {
embeddings,
encoder,
final_layer_norm,
})
}
// TODO: rewrite to newer version
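// Builds a (bsz, 1, seq_len, seq_len) additive mask: positions after the
// current token (or after `mask_after`) are set to f32::MIN so that softmax
// assigns them a near-zero attention weight.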
fn build_causal_attention_mask(
bsz: usize,
seq_len: usize,
mask_after: usize,
device: &Device,
) -> Result<Tensor> {
let mask: Vec<_> = (0..seq_len)
.flat_map(|i| {
(0..seq_len).map(move |j| {
if j > i || j > mask_after {
f32::MIN
} else {
0.
}
})
})
.collect();
let mask = Tensor::from_slice(&mask, (seq_len, seq_len), device)?;
mask.broadcast_as((bsz, 1, seq_len, seq_len))
}
pub fn forward_with_mask(&self, input_ids: &Tensor, mask_after: usize) -> Result<Tensor> {
let (bsz, seq_len) = input_ids.dims2()?;
let input_ids = self.embeddings.forward(input_ids)?;
let causal_attention_mask =
Self::build_causal_attention_mask(bsz, seq_len, mask_after, input_ids.device())?;
let input_ids = self
.encoder
.forward(&input_ids, Some(&causal_attention_mask))?;
self.final_layer_norm.forward(&input_ids)
}
}
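// The pooled text embedding is the hidden state at the EOS position. Following
// the reference implementation, that position is found via `argmax` over the
// input ids, since the EOS token has the highest id in CLIP's vocabulary.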
impl Module for ClipTextTransformer {
fn forward(&self, input_ids: &Tensor) -> Result<Tensor> {
let output = self.forward_with_mask(input_ids, usize::MAX)?;
let sequence_max_indices = input_ids.argmax(D::Minus1)?.to_dtype(DType::I64)?;
let mut indices = Vec::new();
for (batch_idx, &seq_idx) in sequence_max_indices.to_vec1::<i64>()?.iter().enumerate() {
let index = output.i((batch_idx, seq_idx as usize))?.unsqueeze(0)?;
indices.push(index);
}
Tensor::cat(&indices, 0)
}
}
---- hf_public_repos/candle/candle-transformers/src/models/clip/mod.rs ----
//! Contrastive Language-Image Pre-Training
//!
//! Contrastive Language-Image Pre-Training (CLIP) is an architecture trained on
//! pairs of images with related texts.
//!
//! - 💻 [GH Link](https://github.com/openai/CLIP)
//! - 💻 Transformers Python [reference implementation](https://github.com/huggingface/transformers/tree/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip)
//! - 🤗 [HF Model](https://huggingface.co/openai/clip-vit-large-patch14-336)
//!
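//! A minimal usage sketch (assuming `vs` is a `VarBuilder` holding CLIP weights
//! and `pixel_values`/`input_ids` are already preprocessed tensors):
//!
//! ```ignore
//! use candle_transformers::models::clip::{ClipConfig, ClipModel};
//!
//! let config = ClipConfig::vit_base_patch32();
//! let model = ClipModel::new(vs, &config)?;
//! // Pairwise similarity logits between every text and every image in the batch.
//! let (logits_per_text, logits_per_image) = model.forward(&pixel_values, &input_ids)?;
//! ```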
use self::{
text_model::{Activation, ClipTextTransformer},
vision_model::ClipVisionTransformer,
};
use candle::{Result, Tensor, D};
pub mod text_model;
pub mod vision_model;
#[derive(Clone, Debug)]
pub struct ClipModel {
text_model: ClipTextTransformer,
vision_model: ClipVisionTransformer,
visual_projection: candle_nn::Linear,
text_projection: candle_nn::Linear,
logit_scale: Tensor,
}
#[derive(Clone, Debug)]
pub enum EncoderConfig {
Text(text_model::ClipTextConfig),
Vision(vision_model::ClipVisionConfig),
}
impl EncoderConfig {
pub fn embed_dim(&self) -> usize {
match self {
Self::Text(c) => c.embed_dim,
Self::Vision(c) => c.embed_dim,
}
}
pub fn num_attention_heads(&self) -> usize {
match self {
Self::Text(c) => c.num_attention_heads,
Self::Vision(c) => c.num_attention_heads,
}
}
pub fn intermediate_size(&self) -> usize {
match self {
Self::Text(c) => c.intermediate_size,
Self::Vision(c) => c.intermediate_size,
}
}
pub fn num_hidden_layers(&self) -> usize {
match self {
Self::Text(c) => c.num_hidden_layers,
Self::Vision(c) => c.num_hidden_layers,
}
}
pub fn activation(&self) -> Activation {
match self {
Self::Text(_c) => Activation::QuickGelu,
Self::Vision(c) => c.activation,
}
}
}
#[derive(Clone, Debug)]
pub struct ClipConfig {
pub text_config: text_model::ClipTextConfig,
pub vision_config: vision_model::ClipVisionConfig,
pub logit_scale_init_value: f32,
pub image_size: usize,
}
impl ClipConfig {
// base image size is 224, the model weights are around 600MB
pub fn vit_base_patch32() -> Self {
let text_config = text_model::ClipTextConfig::vit_base_patch32();
let vision_config = vision_model::ClipVisionConfig::vit_base_patch32();
Self {
text_config,
vision_config,
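// exp(2.6592) ≈ 14.3 ≈ 1 / 0.07, the temperature used in the CLIP paper.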
logit_scale_init_value: 2.6592,
image_size: 224,
}
}
}
impl ClipModel {
pub fn new(vs: candle_nn::VarBuilder, c: &ClipConfig) -> Result<Self> {
let text_model = ClipTextTransformer::new(vs.pp("text_model"), &c.text_config)?;
let vision_model = ClipVisionTransformer::new(vs.pp("vision_model"), &c.vision_config)?;
let visual_projection = candle_nn::linear_no_bias(
c.vision_config.embed_dim,
c.vision_config.projection_dim,
vs.pp("visual_projection"),
)?;
let text_projection = candle_nn::linear_no_bias(
c.text_config.embed_dim,
c.text_config.projection_dim,
vs.pp("text_projection"),
)?;
// originally nn.Parameter
let logit_scale = if vs.contains_tensor("logit_scale") {
vs.get(&[], "logit_scale")?
} else {
Tensor::new(&[c.logit_scale_init_value], vs.device())?
};
Ok(Self {
text_model,
vision_model,
visual_projection,
text_projection,
logit_scale,
})
}
pub fn get_text_features(&self, input_ids: &Tensor) -> Result<Tensor> {
input_ids
.apply(&self.text_model)?
.apply(&self.text_projection)
}
pub fn get_image_features(&self, pixel_values: &Tensor) -> Result<Tensor> {
pixel_values
.apply(&self.vision_model)?
.apply(&self.visual_projection)
}
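// Contrastive similarity: both feature sets are L2-normalized below, so the
// matmul yields cosine similarities, scaled by exp(logit_scale).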
pub fn forward(&self, pixel_values: &Tensor, input_ids: &Tensor) -> Result<(Tensor, Tensor)> {
let image_features = self.get_image_features(pixel_values)?;
let text_features = self.get_text_features(input_ids)?;
let image_features_normalized = div_l2_norm(&image_features)?;
let text_features_normalized = div_l2_norm(&text_features)?;
let logits_per_text = text_features_normalized.matmul(&image_features_normalized.t()?)?;
let logit_scale = self.logit_scale.exp()?;
let logits_per_text = logits_per_text.broadcast_mul(&logit_scale)?;
let logits_per_image = logits_per_text.t()?;
Ok((logits_per_text, logits_per_image))
}
}
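/// Divides `v` by its L2 norm along the last dimension, projecting each row
/// onto the unit hypersphere.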
pub fn div_l2_norm(v: &Tensor) -> Result<Tensor> {
let l2_norm = v.sqr()?.sum_keepdim(D::Minus1)?.sqrt()?;
v.broadcast_div(&l2_norm)
}
---- hf_public_repos/candle/candle-transformers/src/models/clip/vision_model.rs ----
//! Contrastive Language-Image Pre-Training
//!
//! Contrastive Language-Image Pre-Training (CLIP) is an architecture trained on
//! pairs of images with related texts.
//!
//! - [GH](https://github.com/openai/CLIP)
//! - [Code](https://github.com/huggingface/transformers/tree/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip)
use candle::{IndexOp, Result, Shape, Tensor, D};
use candle_nn as nn;
use candle_nn::Module;
use nn::Conv2dConfig;
use super::{
text_model::{Activation, ClipEncoder},
EncoderConfig,
};
#[derive(Debug, Clone)]
pub struct ClipVisionConfig {
pub embed_dim: usize,
pub activation: Activation,
pub intermediate_size: usize,
pub num_hidden_layers: usize,
pub num_attention_heads: usize,
#[allow(dead_code)]
pub projection_dim: usize,
pub num_channels: usize,
pub image_size: usize,
pub patch_size: usize,
}
impl ClipVisionConfig {
// The config details can be found in the "vision_config" section of this json file:
// https://huggingface.co/openai/clip-vit-base-patch32/blob/main/config.json
pub fn vit_base_patch32() -> Self {
Self {
embed_dim: 768,
activation: Activation::QuickGelu,
intermediate_size: 3072,
num_hidden_layers: 12,
num_attention_heads: 12,
projection_dim: 512,
num_channels: 3,
image_size: 224,
patch_size: 32,
}
}
pub fn clip_vit_large_patch14_336() -> Self {
Self {
embed_dim: 1024,
activation: Activation::QuickGelu,
intermediate_size: 4096,
num_hidden_layers: 24,
num_attention_heads: 16,
projection_dim: 768,
num_channels: 3,
image_size: 336,
patch_size: 14,
}
}
}
// https://github.com/huggingface/transformers/blob/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip/modeling_clip.py#L112
#[derive(Clone, Debug)]
struct ClipVisionEmbeddings {
patch_embedding: candle_nn::Conv2d,
position_ids: Tensor,
class_embedding: Tensor,
position_embedding: candle_nn::Embedding,
}
impl ClipVisionEmbeddings {
fn new(vs: candle_nn::VarBuilder, c: &ClipVisionConfig) -> Result<Self> {
// originally nn.Parameter
let class_embedding = if vs.contains_tensor("class_embedding") {
vs.get(c.embed_dim, "class_embedding")?
} else {
Tensor::randn(0f32, 1f32, c.embed_dim, vs.device())?
};
let num_patches = (c.image_size / c.patch_size).pow(2);
let num_positions = num_patches + 1;
let position_ids = Tensor::arange(0, num_positions as i64, vs.device())?;
let conv2dconfig = Conv2dConfig {
stride: c.patch_size,
..Default::default()
};
let position_embedding =
candle_nn::embedding(num_positions, c.embed_dim, vs.pp("position_embedding"))?;
let patch_embedding = candle_nn::conv2d_no_bias(
c.num_channels,
c.embed_dim,
c.patch_size,
conv2dconfig,
vs.pp("patch_embedding"),
)?;
Ok(Self {
patch_embedding,
position_ids,
class_embedding,
position_embedding,
})
}
}
impl Module for ClipVisionEmbeddings {
fn forward(&self, pixel_values: &Tensor) -> Result<Tensor> {
let batch_size = pixel_values.shape().dims();
let patch_embeds = self
.patch_embedding
.forward(pixel_values)?
.flatten_from(2)?
.transpose(1, 2)?;
let shape = Shape::from((batch_size[0], 1, self.class_embedding.dim(D::Minus1)?));
let class_embeds = self.class_embedding.expand(shape)?;
let embeddings = Tensor::cat(&[class_embeds, patch_embeds], 1)?;
let position_embedding = self.position_embedding.forward(&self.position_ids)?;
embeddings.broadcast_add(&position_embedding)
}
}
// https://github.com/huggingface/transformers/blob/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip/modeling_clip.py#L743
#[derive(Clone, Debug)]
pub struct ClipVisionTransformer {
embeddings: ClipVisionEmbeddings,
encoder: ClipEncoder,
pre_layer_norm: candle_nn::LayerNorm,
final_layer_norm: candle_nn::LayerNorm,
}
impl ClipVisionTransformer {
pub fn new(vs: candle_nn::VarBuilder, c: &ClipVisionConfig) -> Result<Self> {
let embeddings = ClipVisionEmbeddings::new(vs.pp("embeddings"), c)?;
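// "pre_layrnorm" (sic) matches the misspelled tensor name shipped in the
// original CLIP checkpoints.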
let pre_layer_norm = candle_nn::layer_norm(c.embed_dim, 1e-5, vs.pp("pre_layrnorm"))?;
let encoder = ClipEncoder::new(vs.pp("encoder"), &EncoderConfig::Vision(c.clone()))?;
let final_layer_norm = candle_nn::layer_norm(c.embed_dim, 1e-5, vs.pp("post_layernorm"))?;
Ok(Self {
embeddings,
encoder,
final_layer_norm,
pre_layer_norm,
})
}
// required by LLaVA
pub fn output_hidden_states(&self, pixel_values: &Tensor) -> Result<Vec<Tensor>> {
let hidden_states = pixel_values
.apply(&self.embeddings)?
.apply(&self.pre_layer_norm)?;
let mut result = self.encoder.output_hidden_states(&hidden_states, None)?;
let encoder_outputs = result.last().unwrap();
let pooled_output = encoder_outputs.i((.., 0, ..))?;
result.push(self.final_layer_norm.forward(&pooled_output)?.clone());
Ok(result)
}
}
impl Module for ClipVisionTransformer {
fn forward(&self, pixel_values: &Tensor) -> Result<Tensor> {
let hidden_states = pixel_values
.apply(&self.embeddings)?
.apply(&self.pre_layer_norm)?;
let encoder_outputs = self.encoder.forward(&hidden_states, None)?;
// https://github.com/huggingface/transformers/blob/f6fa0f0bf0796ac66f201f23bdb8585de1609add/src/transformers/models/clip/modeling_clip.py#L787
// pooled_output = encoder_outputs[:, 0, :]
let pooled_output = encoder_outputs.i((.., 0, ..))?;
self.final_layer_norm.forward(&pooled_output)
}
}
---- hf_public_repos/candle/candle-transformers/tests/generation_tests.rs ----
use candle::{Device, Result, Tensor};
use candle_transformers::generation::LogitsProcessor;
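// LogitsProcessor::new(seed, temperature, top_p): when `temperature` is `None`,
// sampling degenerates to a greedy argmax over the logits.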
#[test]
fn sample_with_zero_temperature() -> Result<()> {
let mut logits_process = LogitsProcessor::new(1337, None, None);
let logits = Tensor::new(&[0.1, 0.2, 0.3, 0.4], &Device::Cpu)?;
let token = logits_process.sample(&logits)?;
assert_eq!(token, 3);
Ok(())
}
#[test]
fn sample_with_temperature() -> Result<()> {
let mut logits_process = LogitsProcessor::new(42, Some(0.9), None);
let logits = Tensor::new(&[0.1, 0.2, 0.3, 0.4], &Device::Cpu)?;
let token = logits_process.sample(&logits)?;
assert_eq!(token, 0);
Ok(())
}
#[test]
fn sample_with_top_p() -> Result<()> {
let mut logits_process = LogitsProcessor::new(42, Some(1.0), Some(0.5));
let logits = Tensor::new(&[0.1, 0.2, 0.3, 0.4], &Device::Cpu)?;
let token = logits_process.sample(&logits)?;
assert_eq!(token, 2);
Ok(())
}
#[test]
fn sample_with_top_k() -> Result<()> {
let mut logits_process = LogitsProcessor::from_sampling(
42,
candle_transformers::generation::Sampling::TopK {
k: 1,
temperature: 1.0,
},
);
let logits = Tensor::new(&[0.1, 0.2, 0.3, 0.4], &Device::Cpu)?;
let token = logits_process.sample(&logits)?;
assert_eq!(token, 3);
let mut logits_process = LogitsProcessor::from_sampling(
42,
candle_transformers::generation::Sampling::TopK {
k: 2,
temperature: 1.0,
},
);
let logits = Tensor::new(&[0.1, 0.2, 0.3, 0.4], &Device::Cpu)?;
let token = logits_process.sample(&logits)?;
assert_eq!(token, 3);
let token = logits_process.sample(&logits)?;
assert_eq!(token, 2);
Ok(())
}
---- hf_public_repos/candle/candle-transformers/tests/nms_tests.rs ----
use candle::Result;
use candle_transformers::object_detection::{
non_maximum_suppression, soft_non_maximum_suppression, Bbox,
};
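// Classic NMS drops boxes whose IoU with a higher-confidence box exceeds the
// threshold; soft-NMS keeps them but decays their confidence scores instead.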
#[test]
fn nms_basic() -> Result<()> {
// Boxes based upon https://thepythoncode.com/article/non-maximum-suppression-using-opencv-in-python
let mut bboxes = vec![vec![
Bbox {
xmin: 245.0,
ymin: 305.0,
xmax: 575.0,
ymax: 490.0,
confidence: 0.9,
data: (),
}, // Box 1
Bbox {
xmin: 235.0,
ymin: 300.0,
xmax: 485.0,
ymax: 515.0,
confidence: 0.8,
data: (),
}, // Box 2
Bbox {
xmin: 305.0,
ymin: 270.0,
xmax: 540.0,
ymax: 500.0,
confidence: 0.6,
data: (),
}, // Box 3
]];
non_maximum_suppression(&mut bboxes, 0.5);
let bboxes = bboxes.into_iter().next().unwrap();
assert_eq!(bboxes.len(), 1);
assert_eq!(bboxes[0].confidence, 0.9);
Ok(())
}
#[test]
fn softnms_basic_functionality() -> Result<()> {
let mut bboxes = vec![vec![
Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.5,
data: (),
},
Bbox {
xmin: 0.1,
ymin: 0.1,
xmax: 1.1,
ymax: 1.1,
confidence: 0.9,
data: (),
},
Bbox {
xmin: 0.2,
ymin: 0.2,
xmax: 1.2,
ymax: 1.2,
confidence: 0.6,
data: (),
},
]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
// Should decay boxes following highest confidence box
assert!(bboxes[0][0].confidence == 0.9);
assert!(bboxes[0][1].confidence < 0.5);
assert!(bboxes[0][2].confidence < 0.6);
Ok(())
}
#[test]
fn softnms_confidence_decay() -> Result<()> {
let mut bboxes = vec![vec![
Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.9,
data: (),
}, // Reference box
Bbox {
xmin: 0.1,
ymin: 0.1,
xmax: 1.1,
ymax: 1.1,
confidence: 0.8,
data: (),
}, // Overlapping box
]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
// Check that confidence of the overlapping box is decayed
assert!(bboxes[0][0].confidence == 0.9);
assert!(bboxes[0][1].confidence < 0.8);
Ok(())
}
#[test]
fn softnms_confidence_threshold() -> Result<()> {
let mut bboxes = vec![vec![
Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.9,
data: (),
},
Bbox {
xmin: 0.1,
ymin: 0.1,
xmax: 1.1,
ymax: 1.1,
confidence: 0.05,
data: (),
},
]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
// The box below the confidence threshold is kept, but its confidence is zeroed out
assert_eq!(bboxes[0].len(), 2);
assert_eq!(bboxes[0][0].confidence, 0.9);
assert_eq!(bboxes[0][1].confidence, 0.00);
Ok(())
}
#[test]
fn softnms_no_overlap() -> Result<()> {
let mut bboxes = vec![vec![
Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.9,
data: (),
},
Bbox {
xmin: 2.0,
ymin: 2.0,
xmax: 3.0,
ymax: 3.0,
confidence: 0.8,
data: (),
},
]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
// Both boxes should remain as they do not significantly overlap
assert_eq!(bboxes[0].len(), 2);
assert_eq!(bboxes[0][0].confidence, 0.9);
assert_eq!(bboxes[0][1].confidence, 0.8);
Ok(())
}
#[test]
fn softnms_no_bbox() -> Result<()> {
let mut bboxes: Vec<Vec<Bbox<()>>> = vec![];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
assert!(bboxes.is_empty());
Ok(())
}
#[test]
fn softnms_single_bbox() -> Result<()> {
let mut bboxes = vec![vec![Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.9,
data: (),
}]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
assert_eq!(bboxes[0].len(), 1);
Ok(())
}
#[test]
fn softnms_equal_confidence_overlap() -> Result<()> {
let mut bboxes = vec![vec![
Bbox {
xmin: 0.0,
ymin: 0.0,
xmax: 1.0,
ymax: 1.0,
confidence: 0.5,
data: (),
},
Bbox {
xmin: 0.1,
ymin: 0.1,
xmax: 1.1,
ymax: 1.1,
confidence: 0.5,
data: (),
},
]];
soft_non_maximum_suppression(&mut bboxes, Some(0.5), Some(0.1), Some(0.5));
// The first box acts as the reference, so only the second box is decayed.
// The implementation would need to change for both to be decayed.
assert_eq!(bboxes[0].len(), 2);
assert!(bboxes[0][0].confidence == 0.5);
assert!(bboxes[0][1].confidence < 0.5);
Ok(())
}
---- hf_public_repos/candle/candle-metal-kernels/Cargo.toml ----
[package]
name = "candle-metal-kernels"
version = "0.8.0"
edition = "2021"
description = "Metal kernels for Candle"
repository = "https://github.com/huggingface/candle"
keywords = ["blas", "tensor", "machine-learning"]
categories = ["science"]
license = "MIT OR Apache-2.0"
[dependencies]
metal = { version = "0.27.0", features = ["mps"] }
once_cell = "1.18.0"
thiserror = "1"
tracing = "0.1.37"
[dev-dependencies]
clap = { version = "4.2.4", features = ["derive"] }
half = { version = "2.3.1", features = [
"num-traits",
"use-intrinsics",
"rand_distr",
] }
anyhow = "1"
rand = "0.8.5"
rand_distr = "0.4.3"
---- hf_public_repos/candle/candle-metal-kernels/README.md ----
# candle-metal-kernels
This crate contains the Metal kernels used by candle.
---- hf_public_repos/candle/candle-metal-kernels/src/fill.metal ----
#include <metal_stdlib>
using namespace metal;
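// Fills `out` with the scalar `value`, one thread per element; threads whose
// id falls past `numel` return early.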
template<typename T> METAL_FUNC void fill_with(
device T *out,
constant float &value,
constant size_t &numel,
uint tid [[thread_position_in_grid]]
) {
if (tid >= numel) {
return;
}
out[tid] = static_cast<T>(value);
}
#define FILL_OP(NAME, T) \
kernel void fill_##NAME( \
device T *out, \
constant float &value, \
constant size_t &numel, \
uint tid [[thread_position_in_grid]] \
) { \
fill_with<T>(out, value, numel, tid); \
}
#define FILL_OPS(NAME, T) \
FILL_OP(NAME, T)
FILL_OPS(u8, uchar)
FILL_OPS(u32, uint)
FILL_OPS(i64, long)
FILL_OPS(f16, half)
FILL_OPS(f32, float)
#if __METAL_VERSION__ >= 310
FILL_OPS(bf16, bfloat)
#endif
---- hf_public_repos/candle/candle-metal-kernels/src/binary.metal ----
#include <metal_stdlib>
#define MAX(x, y) ((x) > (y) ? (x) : (y))
#define MIN(x, y) ((x) < (y) ? (x) : (y))
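// Maps a linear element index to a memory offset for a non-contiguous tensor,
// walking the dimensions from innermost to outermost using the given strides.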
METAL_FUNC uint get_strided_index(
uint idx,
constant size_t &num_dims,
constant size_t *dims,
constant size_t *strides
) {
uint strided_i = 0;
for (uint d = 0; d < num_dims; d++) {
uint dim_idx = num_dims - 1 - d;
strided_i += (idx % dims[dim_idx]) * strides[dim_idx];
idx /= dims[dim_idx];
}
return strided_i;
}
using namespace metal;
#define BINARY(FN, TYPENAME, OUT_TYPENAME, FN_NAME, FN_NAME_STRIDED) \
kernel void FN_NAME( \
constant size_t &dim, \
device const TYPENAME *left, \
device const TYPENAME *right, \
device OUT_TYPENAME *output, \
uint tid [[ thread_position_in_grid ]] \
) { \
if (tid >= dim) { \
return; \
} \
TYPENAME x = left[tid]; \
TYPENAME y = right[tid]; \
output[tid] = OUT_TYPENAME(FN); \
}\
kernel void FN_NAME_STRIDED( \
constant size_t &dim, \
constant size_t &num_dims, \
constant size_t *dims, \
constant size_t *left_strides, \
constant size_t *right_strides, \
device const TYPENAME *left, \
device const TYPENAME *right, \
device OUT_TYPENAME *output, \
uint tid [[ thread_position_in_grid ]] \
) { \
if (tid >= dim) { \
return; \
} \
TYPENAME x = left[get_strided_index(tid, num_dims, dims, left_strides)]; \
TYPENAME y = right[get_strided_index(tid, num_dims, dims, right_strides)]; \
output[tid] = OUT_TYPENAME(FN); \
}
#define BINARY_OP(FN, NAME) \
BINARY(FN, float, float, NAME##_f32, NAME##_f32_strided); \
BINARY(FN, half, half, NAME##_f16, NAME##_f16_strided); \
BINARY(FN, uint32_t, uint32_t, NAME##_u32, NAME##_u32_strided); \
BINARY(FN, uint8_t, uint8_t, NAME##_u8, NAME##_u8_strided);
#define BINARY_OP_OUT(NAME, FN) \
BINARY(FN, float, uint8_t, NAME##_f32, NAME##_f32_strided); \
BINARY(FN, half, uint8_t, NAME##_f16, NAME##_f16_strided); \
BINARY(FN, uint32_t, uint8_t, NAME##_u32, NAME##_u32_strided); \
BINARY(FN, uint8_t, uint8_t, NAME##_u8, NAME##_u8_strided);
#define INT64_BINARY_OP(NAME, FN) \
BINARY(FN, int64_t, int64_t, NAME##_i64, NAME##_i64_strided);
#define INT64_BINARY_OP_OUT(NAME, FN) \
BINARY(FN, int64_t, uint8_t, NAME##_i64, NAME##_i64_strided);
#define BFLOAT_BINARY_OP(FN, NAME) \
BINARY(FN, bfloat, bfloat, NAME##_bf16, NAME##_bf16_strided);
#define BFLOAT_BINARY_OP_OUT(NAME, FN) \
BINARY(FN, bfloat, uint8_t, NAME##_bf16, NAME##_bf16_strided);
BINARY_OP(x + y, add)
BINARY_OP(x - y, sub)
BINARY_OP(x * y, mul)
BINARY_OP(x / y, div)
BINARY_OP(MIN(x, y), min)
BINARY_OP(MAX(x, y), max)
BINARY_OP_OUT(eq, x == y)
BINARY_OP_OUT(ne, x != y)
BINARY_OP_OUT(le, x <= y)
BINARY_OP_OUT(lt, x < y)
BINARY_OP_OUT(ge, x >= y)
BINARY_OP_OUT(gt, x > y)
#if __METAL_VERSION__ >= 220
INT64_BINARY_OP(add, x + y)
INT64_BINARY_OP(sub, x - y)
INT64_BINARY_OP(mul, x * y)
INT64_BINARY_OP(div, x / y)
INT64_BINARY_OP(min, MIN(x, y))
INT64_BINARY_OP(max, MAX(x, y))
INT64_BINARY_OP_OUT(eq, x == y)
INT64_BINARY_OP_OUT(ne, x != y)
INT64_BINARY_OP_OUT(le, x <= y)
INT64_BINARY_OP_OUT(lt, x < y)
INT64_BINARY_OP_OUT(ge, x >= y)
INT64_BINARY_OP_OUT(gt, x > y)
#endif
#if defined(__HAVE_BFLOAT__)
BFLOAT_BINARY_OP(x + y, add)
BFLOAT_BINARY_OP(x - y, sub)
BFLOAT_BINARY_OP(x * y, mul)
BFLOAT_BINARY_OP(x / y, div)
BFLOAT_BINARY_OP(MIN(x, y), min)
BFLOAT_BINARY_OP(MAX(x, y), max)
BFLOAT_BINARY_OP_OUT(eq, x == y)
BFLOAT_BINARY_OP_OUT(ne, x != y)
BFLOAT_BINARY_OP_OUT(le, x <= y)
BFLOAT_BINARY_OP_OUT(lt, x < y)
BFLOAT_BINARY_OP_OUT(ge, x >= y)
BFLOAT_BINARY_OP_OUT(gt, x > y)
#endif
---- hf_public_repos/candle/candle-metal-kernels/src/lib.rs ----
use metal::{
Buffer, CompileOptions, ComputeCommandEncoderRef, ComputePipelineState, Device, Function,
FunctionConstantValues, Library, MTLDataType, MTLSize, NSUInteger,
};
use std::collections::HashMap;
use std::ffi::c_void;
use std::sync::RwLock;
pub mod utils;
pub use utils::BufferOffset;
use utils::{get_block_dims, linear_split, EncoderParam, EncoderProvider};
const AFFINE: &str = include_str!("affine.metal");
const BINARY: &str = include_str!("binary.metal");
const CAST: &str = include_str!("cast.metal");
const CONV: &str = include_str!("conv.metal");
const FILL: &str = include_str!("fill.metal");
const INDEXING: &str = include_str!("indexing.metal");
// Current source: https://github.com/ivarflakstad/metal-flash-attention/tree/candle
const MFA: &[u8] = include_bytes!("libMetalFlashAttention.metallib");
const MLX_GEMM: &str = include_str!("mlx_gemm.metal");
const QUANTIZED: &str = include_str!("quantized.metal");
const RANDOM: &str = include_str!("random.metal");
const REDUCE: &str = include_str!("reduce.metal");
const SORT: &str = include_str!("sort.metal");
const TERNARY: &str = include_str!("ternary.metal");
const UNARY: &str = include_str!("unary.metal");
const SDPA: &str = include_str!("scaled_dot_product_attention.metal");
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Source {
Affine,
Binary,
Cast,
Conv,
Fill,
Gemm,
Indexing,
Mfa,
Quantized,
Random,
Reduce,
Sort,
Ternary,
Unary,
Sdpa,
}
pub mod copy2d {
pub struct Kernel(pub &'static str);
pub const FLOAT: Kernel = Kernel("copy2d_f32");
pub const HALF: Kernel = Kernel("copy2d_f16");
pub const BFLOAT: Kernel = Kernel("copy2d_bf16");
pub const I64: Kernel = Kernel("copy2d_i64");
pub const U32: Kernel = Kernel("copy2d_u32");
pub const U8: Kernel = Kernel("copy2d_u8");
}
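// `ops!` expands each op name into per-dtype kernel-name constants for the
// three dispatch flavors: contiguous, contiguous_tiled and strided.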
macro_rules! ops{
($($name:ident),+) => {
pub mod contiguous {
pub struct Kernel(pub &'static str);
$(
pub mod $name {
use super::Kernel;
pub const FLOAT: Kernel = Kernel(concat!(stringify!($name), "_f32"));
pub const HALF: Kernel = Kernel(concat!(stringify!($name), "_f16"));
pub const BFLOAT: Kernel = Kernel(concat!(stringify!($name), "_bf16"));
pub const I64: Kernel = Kernel(concat!(stringify!($name), "_i64"));
pub const U32: Kernel = Kernel(concat!(stringify!($name), "_u32"));
pub const U8: Kernel = Kernel(concat!(stringify!($name), "_u8"));
}
)+
pub mod copy {
use super::Kernel;
pub const FLOAT: Kernel = Kernel("copy_f32");
pub const HALF: Kernel = Kernel("copy_f16");
pub const BFLOAT: Kernel = Kernel("copy_bf16");
pub const I64: Kernel = Kernel("copy_i64");
pub const U32: Kernel = Kernel("copy_u32");
pub const U8: Kernel = Kernel("copy_u8");
}
}
pub mod contiguous_tiled {
pub struct Kernel(pub &'static str);
$(
pub mod $name {
use super::Kernel;
pub const FLOAT: Kernel = Kernel(concat!(stringify!($name), "_f32_tiled"));
pub const HALF: Kernel = Kernel(concat!(stringify!($name), "_f16_tiled"));
pub const BFLOAT: Kernel = Kernel(concat!(stringify!($name), "_bf16_tiled"));
pub const I64: Kernel = Kernel(concat!(stringify!($name), "_i64_tiled"));
pub const U32: Kernel = Kernel(concat!(stringify!($name), "_u32_tiled"));
pub const U8: Kernel = Kernel(concat!(stringify!($name), "_u8_tiled"));
}
)+
pub mod copy {
use super::Kernel;
pub const FLOAT: Kernel = Kernel("copy_f32_tiled");
pub const HALF: Kernel = Kernel("copy_f16_tiled");
pub const BFLOAT: Kernel = Kernel("copy_bf16_tiled");
pub const I64: Kernel = Kernel("copy_i64_tiled");
pub const U32: Kernel = Kernel("copy_u32_tiled");
pub const U8: Kernel = Kernel("copy_u8_tiled");
}
}
pub mod strided {
pub struct Kernel(pub &'static str);
$(
pub mod $name {
use super::Kernel;
pub const FLOAT: Kernel = Kernel(concat!(stringify!($name), "_f32_strided"));
pub const HALF: Kernel = Kernel(concat!(stringify!($name), "_f16_strided"));
pub const BFLOAT: Kernel = Kernel(concat!(stringify!($name), "_bf16_strided"));
pub const I64: Kernel = Kernel(concat!(stringify!($name), "_i64_strided"));
pub const U32: Kernel = Kernel(concat!(stringify!($name), "_u32_strided"));
pub const U8: Kernel = Kernel(concat!(stringify!($name), "_u8_strided"));
}
)+
pub mod copy {
use super::Kernel;
pub const FLOAT: Kernel = Kernel("copy_f32_strided");
pub const HALF: Kernel = Kernel("copy_f16_strided");
pub const BFLOAT: Kernel = Kernel("copy_bf16_strided");
pub const I64: Kernel = Kernel("copy_i64_strided");
pub const U32: Kernel = Kernel("copy_u32_strided");
pub const U8: Kernel = Kernel("copy_u8_strided");
}
}
};
}
pub mod unary {
ops!(
cos, sin, exp, sqr, sqrt, neg, log, gelu, abs, ceil, floor, relu, round, erf, gelu_erf,
tanh, recip, silu, sign, sigmoid
);
}
pub mod binary {
ops!(add, sub, mul, div, min, max, eq, ne, le, lt, ge, gt);
}
#[derive(thiserror::Error, Debug)]
pub enum MetalKernelError {
#[error("Could not lock kernel map: {0}")]
LockError(String),
#[error("Error while loading library: {0}")]
LoadLibraryError(String),
#[error("Error while loading function: {0:?}")]
LoadFunctionError(String),
#[error("Failed to create compute function")]
FailedToCreateComputeFunction,
#[error("Failed to create pipeline")]
FailedToCreatePipeline(String),
#[error("Invalid matmul arguments {lhs_stride:?} {rhs_stride:?} {mnk:?}")]
MatMulNonContiguous {
lhs_stride: Vec<usize>,
rhs_stride: Vec<usize>,
mnk: (usize, usize, usize),
},
#[error("Sdpa {variation} head size was {got}, expected {expected:?}")]
SdpaHeadSizeMismatch {
variation: &'static str,
got: usize,
expected: Vec<usize>,
},
#[error("Sdpa {variation} got dtype {got:?}")]
SdpaHeadDTypeMismatch {
variation: &'static str,
got: SdpaDType,
},
}
impl<T> From<std::sync::PoisonError<T>> for MetalKernelError {
fn from(e: std::sync::PoisonError<T>) -> Self {
Self::LockError(e.to_string())
}
}
type Libraries = HashMap<Source, Library>;
type Pipelines = HashMap<(&'static str, Option<ConstantValues>), ComputePipelineState>;
#[derive(Debug)]
pub struct Kernels {
libraries: RwLock<Libraries>,
pipelines: RwLock<Pipelines>,
}
impl Default for Kernels {
fn default() -> Self {
Self::new()
}
}
impl Kernels {
pub fn new() -> Self {
let libraries = RwLock::new(Libraries::new());
let pipelines = RwLock::new(Pipelines::new());
Self {
libraries,
pipelines,
}
}
fn get_library_source(&self, source: Source) -> &'static str {
match source {
Source::Affine => AFFINE,
Source::Binary => BINARY,
Source::Cast => CAST,
Source::Conv => CONV,
Source::Fill => FILL,
Source::Gemm => MLX_GEMM,
Source::Indexing => INDEXING,
Source::Quantized => QUANTIZED,
Source::Random => RANDOM,
Source::Reduce => REDUCE,
Source::Sort => SORT,
Source::Ternary => TERNARY,
Source::Unary => UNARY,
Source::Sdpa => SDPA,
Source::Mfa => panic!("Invalid lib"),
}
}
/// Load the given library from its [`source`].
/// If it has previously been loaded, fetch it from the cache instead.
pub fn load_library(
&self,
device: &Device,
source: Source,
) -> Result<Library, MetalKernelError> {
let mut libraries = self.libraries.write()?;
if let Some(lib) = libraries.get(&source) {
Ok(lib.clone())
} else {
let lib = match source {
Source::Mfa => {
let source_data = MFA;
device.new_library_with_data(source_data).map_err(|e| {
MetalKernelError::LoadLibraryError(format!(
"Candle metal requires macosx > 13.0 or higher, cannot load mfa: {e}"
))
})?
}
source => {
let source_content = self.get_library_source(source);
device
.new_library_with_source(source_content, &CompileOptions::new())
.map_err(|e| MetalKernelError::LoadLibraryError(e.to_string()))?
}
};
libraries.insert(source, lib.clone());
Ok(lib)
}
}
fn load_function(
&self,
device: &Device,
source: Source,
name: &'static str,
constants: Option<FunctionConstantValues>,
) -> Result<Function, MetalKernelError> {
let func = self
.load_library(device, source)?
.get_function(name, constants)
.map_err(|e| MetalKernelError::LoadFunctionError(e.to_string()))?;
Ok(func)
}
/// Load the given pipeline.
/// Loads the library from source, then gets the function [`name`] from
/// that source.
fn load_pipeline_with_constants(
&self,
device: &Device,
source: Source,
name: &'static str,
constants: Option<ConstantValues>,
) -> Result<ComputePipelineState, MetalKernelError> {
let mut pipelines = self.pipelines.write()?;
let key = (name, constants);
if let Some(pipeline) = pipelines.get(&key) {
Ok(pipeline.clone())
} else {
let (name, constants) = key;
let func = self.load_function(
device,
source,
name,
constants.as_ref().map(|c| c.function_constant_values()),
)?;
let pipeline = device
.new_compute_pipeline_state_with_function(&func)
.map_err(|e| MetalKernelError::FailedToCreatePipeline(e.to_string()))?;
pipelines.insert((name, constants), pipeline.clone());
Ok(pipeline)
}
}
/// Load the given pipeline (without function constants).
/// Loads the library from source, then gets the function [`name`] from
/// that source.
pub fn load_pipeline(
&self,
device: &Device,
source: Source,
name: &'static str,
) -> Result<ComputePipelineState, MetalKernelError> {
self.load_pipeline_with_constants(device, source, name, None)
}
}
#[allow(clippy::too_many_arguments)]
pub fn call_copy2d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: copy2d::Kernel,
input: &Buffer,
output: &Buffer,
d1: usize,
d2: usize,
src_s: usize,
dst_s: usize,
src_o_in_bytes: usize,
dst_o_in_bytes: usize,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Unary, name.0)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
d1 as i64,
d2 as i64,
src_s as i64,
dst_s as i64,
(input, src_o_in_bytes),
(output, dst_o_in_bytes)
)
);
let grid_dims = MTLSize {
width: d1 as u64,
height: d2 as u64,
depth: 1,
};
let group_dims = get_block_dims(d1 as u64, d2 as u64, 1);
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_threads(grid_dims, group_dims);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_unary_contiguous_tiled(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: unary::contiguous_tiled::Kernel,
length: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Unary, kernel_name.0)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let tile_size = 2;
let tiles = length.div_ceil(tile_size);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, tiles);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_unary_contiguous(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: unary::contiguous::Kernel,
length: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Unary, kernel_name.0)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_unary_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: unary::strided::Kernel,
shape: &[usize],
input: BufferOffset,
strides: &[usize],
output: BufferOffset,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Unary, name.0)?;
let length: usize = shape.iter().product();
let num_dims: usize = shape.len();
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, num_dims, shape, strides, &input, &output));
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output.buffer, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_binary_contiguous(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: binary::contiguous::Kernel,
length: usize,
left: BufferOffset,
right: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Binary, kernel_name.0)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, &left, &right, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.use_resource(left.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(right.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_binary_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: binary::strided::Kernel,
shape: &[usize],
left_input: BufferOffset,
left_strides: &[usize],
right_input: BufferOffset,
right_strides: &[usize],
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Binary, name.0)?;
let num_dims: usize = shape.len();
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let length: usize = shape.iter().product();
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
length,
num_dims,
shape,
left_strides,
right_strides,
&left_input,
&right_input,
output
)
);
encoder.use_resource(left_input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(right_input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_cast_contiguous(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
length: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Cast, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_cast_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
shape: &[usize],
input: BufferOffset,
input_strides: &[usize],
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Cast, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
let length: usize = shape.iter().product();
set_params!(
encoder,
(length, shape.len(), shape, input_strides, &input, output)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_reduce_contiguous(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
length: usize,
out_length: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let elements_to_sum = length / out_length;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, elements_to_sum, &input, output));
let thread_group_count = MTLSize {
width: out_length as u64,
height: 1,
depth: 1,
};
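// One threadgroup per output element; its width is capped by the pipeline
// limit and rounded up to the next power of two for the in-group reduction.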
let width = std::cmp::min(
pipeline.max_total_threads_per_threadgroup(),
(elements_to_sum as u64).div_ceil(2),
)
.next_power_of_two();
let thread_group_size = MTLSize {
width,
height: 1,
depth: 1,
};
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_reduce_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
shape: &[usize],
strides: &[usize],
out_length: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let length: usize = shape.iter().product();
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let elements_to_sum = length / out_length;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(shape.len(), shape, strides, elements_to_sum, &input, output)
);
let thread_group_count = MTLSize {
width: out_length as u64,
height: 1,
depth: 1,
};
let width = std::cmp::min(
pipeline.max_total_threads_per_threadgroup(),
elements_to_sum as u64,
)
.next_power_of_two();
let thread_group_size = MTLSize {
width,
height: 1,
depth: 1,
};
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_last_softmax(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
length: usize,
elements_to_sum: usize,
input: &Buffer,
input_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(length, elements_to_sum, (input, input_offset), output)
);
let out_length = length / elements_to_sum;
let thread_group_count = MTLSize {
width: out_length as u64,
height: 1,
depth: 1,
};
let width = std::cmp::min(
pipeline.max_total_threads_per_threadgroup(),
elements_to_sum as u64,
)
.next_power_of_two();
let thread_group_size = MTLSize {
width,
height: 1,
depth: 1,
};
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_rms_norm(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
length: usize,
elements_to_sum: usize,
eps: f32,
input: &Buffer,
input_offset: usize,
alpha: &Buffer,
alpha_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
length,
elements_to_sum,
(input, input_offset),
output,
(alpha, alpha_offset),
eps
)
);
let out_length = length / elements_to_sum;
let thread_group_count = MTLSize {
width: out_length as u64,
height: 1,
depth: 1,
};
let width = std::cmp::min(
pipeline.max_total_threads_per_threadgroup(),
elements_to_sum as u64,
)
.next_power_of_two();
let thread_group_size = MTLSize {
width,
height: 1,
depth: 1,
};
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.set_threadgroup_memory_length(0, (width * 4).max(16) as u64);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_layer_norm(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
length: usize,
elements_to_sum: usize,
eps: f32,
input: &Buffer,
input_offset: usize,
alpha: &Buffer,
alpha_offset: usize,
beta: &Buffer,
beta_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
length,
elements_to_sum,
(input, input_offset),
output,
(alpha, alpha_offset),
(beta, beta_offset),
eps
)
);
let out_length = length / elements_to_sum;
let thread_group_count = MTLSize {
width: out_length as u64,
height: 1,
depth: 1,
};
let width = std::cmp::min(
pipeline.max_total_threads_per_threadgroup(),
elements_to_sum as u64,
)
.next_power_of_two();
let thread_group_size = MTLSize {
width,
height: 1,
depth: 1,
};
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.set_threadgroup_memory_length(0, (width * 8).max(32) as u64);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_rope_i(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
bh: usize,
td: usize,
src: &Buffer,
src_offset: usize,
cos: &Buffer,
cos_offset: usize,
sin: &Buffer,
sin_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
bh,
td,
(src, src_offset),
(cos, cos_offset),
(sin, sin_offset),
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, (bh * td) / 2);
encoder.use_resource(src, metal::MTLResourceUsage::Read);
encoder.use_resource(cos, metal::MTLResourceUsage::Read);
encoder.use_resource(sin, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_rope_thd(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
b: usize,
t: usize,
h: usize,
d: usize,
src: &Buffer,
src_offset: usize,
cos: &Buffer,
cos_offset: usize,
sin: &Buffer,
sin_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
b,
t,
h,
d,
(src, src_offset),
(cos, cos_offset),
(sin, sin_offset),
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, (b * t * h * d) / 2);
encoder.use_resource(src, metal::MTLResourceUsage::Read);
encoder.use_resource(cos, metal::MTLResourceUsage::Read);
encoder.use_resource(sin, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_rope(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
kernel_name: &'static str,
bh: usize,
td: usize,
d: usize,
src: &Buffer,
src_offset: usize,
cos: &Buffer,
cos_offset: usize,
sin: &Buffer,
sin_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Reduce, kernel_name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
bh,
td,
d,
(src, src_offset),
(cos, cos_offset),
(sin, sin_offset),
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, (bh * td) / 2);
encoder.use_resource(src, metal::MTLResourceUsage::Read);
encoder.use_resource(cos, metal::MTLResourceUsage::Read);
encoder.use_resource(sin, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_affine(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
size: usize,
input: BufferOffset,
output: &Buffer,
mul: f32,
add: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (size, mul, add, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_affine_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
input: BufferOffset,
input_stride: &[usize],
output: &Buffer,
mul: f32,
add: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let size: usize = shape.iter().product();
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
size,
shape.len(),
shape,
input_stride,
mul,
add,
&input,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_powf(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
size: usize,
input: BufferOffset,
output: &Buffer,
mul: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (size, mul, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_powf_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
input: BufferOffset,
input_stride: &[usize],
output: &Buffer,
mul: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let size: usize = shape.iter().product();
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(size, shape.len(), shape, input_stride, mul, &input, output)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_elu(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
size: usize,
input: BufferOffset,
output: &Buffer,
mul: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (size, mul, &input, output));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_elu_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
input: BufferOffset,
input_stride: &[usize],
output: &Buffer,
mul: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Affine, name)?;
let size: usize = shape.iter().product();
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(size, shape.len(), shape, input_stride, mul, &input, output)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_where_cond_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
cond: BufferOffset,
cond_stride: &[usize],
left: BufferOffset,
left_stride: &[usize],
right: BufferOffset,
right_stride: &[usize],
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Ternary, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
let size: usize = shape.iter().product();
let rank = shape.len();
set_params!(
encoder,
(
size,
rank,
shape,
cond_stride,
left_stride,
right_stride,
&cond,
&left,
&right,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, size);
encoder.use_resource(cond.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(left.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(right.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_index_select(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
ids_size: usize,
dim: usize,
contiguous: bool,
src_dims: &[usize],
src_strides: &[usize],
input: BufferOffset,
ids: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
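    // The source is viewed as (left_size, src_dim_size, right_size): dimensions before
    // `dim` are flattened into left_size and dimensions after it into right_size. The
    // output keeps this layout but with ids_size entries along the indexed axis.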
let left_size: usize = shape[..dim].iter().product();
let right_size: usize = shape[dim + 1..].iter().product();
let src_dim_size = shape[dim];
let dst_el = ids_size * left_size * right_size;
let pipeline = kernels.load_pipeline(device, Source::Indexing, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
dst_el,
left_size,
src_dim_size,
right_size,
ids_size,
contiguous,
src_dims,
src_strides,
&input,
&ids,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(ids.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_gather(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
ids_size: usize,
dim: usize,
input: BufferOffset,
ids: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let left_size: usize = shape[..dim].iter().product();
let right_size: usize = shape[dim + 1..].iter().product();
let src_dim_size = shape[dim];
let dst_el = ids_size * left_size * right_size;
let pipeline = kernels.load_pipeline(device, Source::Indexing, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
dst_el,
left_size,
src_dim_size,
right_size,
ids_size,
&input,
&ids,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(ids.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_scatter_add(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
src_shape: &[usize],
dst_shape: &[usize],
dim: usize,
input: BufferOffset,
ids: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
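    // One thread is dispatched per (left, right) output slot (dst_el below); the kernel
    // walks the scattered `dim` axis internally, which is why src_dim_size is passed as
    // a parameter rather than folded into the dispatch size.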
let left_size: usize = src_shape[..dim].iter().product();
let right_size: usize = src_shape[dim + 1..].iter().product();
let src_dim_size = src_shape[dim];
let dst_el = left_size * right_size;
let dst_dim_size = dst_shape[dim];
let pipeline = kernels.load_pipeline(device, Source::Indexing, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
dst_el,
left_size,
src_dim_size,
right_size,
dst_dim_size,
&input,
&ids,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(ids.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_index_add(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
src_shape: &[usize],
dst_shape: &[usize],
ids_shape: &[usize],
dim: usize,
input: BufferOffset,
ids: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let left_size: usize = src_shape[..dim].iter().product();
let right_size: usize = src_shape[dim + 1..].iter().product();
let src_dim_size = src_shape[dim];
let dst_el = left_size * right_size;
let dst_dim_size = dst_shape[dim];
let ids_dim_size = ids_shape[0];
let pipeline = kernels.load_pipeline(device, Source::Indexing, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
dst_el,
left_size,
src_dim_size,
right_size,
dst_dim_size,
ids_dim_size,
&input,
&ids,
output
)
);
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(ids.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[derive(Debug, PartialEq)]
pub enum Value {
USize(usize),
Bool(bool),
F32(f32),
U16(u16),
}
impl std::hash::Hash for Value {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
match self {
Value::F32(v) => v.to_bits().hash(state),
Value::USize(v) => v.hash(state),
Value::U16(v) => v.hash(state),
Value::Bool(v) => v.hash(state),
}
}
}
impl Value {
fn data_type(&self) -> MTLDataType {
match self {
Value::USize(_) => MTLDataType::UInt,
Value::F32(_) => MTLDataType::Float,
Value::U16(_) => MTLDataType::UShort,
Value::Bool(_) => MTLDataType::Bool,
}
}
}
/// Not strictly true since f32 is not `Eq` (NaN != NaN), but the hash uses the bit pattern, so this is good enough for our purposes.
impl Eq for Value {}
#[derive(Debug, Eq, PartialEq, Hash)]
struct ConstantValues(Vec<(usize, Value)>);
impl ConstantValues {
pub fn new(values: Vec<(usize, Value)>) -> Self {
Self(values)
}
fn function_constant_values(&self) -> FunctionConstantValues {
let f = FunctionConstantValues::new();
for (index, value) in &self.0 {
let ty = value.data_type();
match value {
Value::USize(v) => {
f.set_constant_value_at_index(
v as *const usize as *const c_void,
ty,
*index as u64,
);
}
Value::F32(v) => {
f.set_constant_value_at_index(
v as *const f32 as *const c_void,
ty,
*index as u64,
);
}
Value::U16(v) => {
f.set_constant_value_at_index(
v as *const u16 as *const c_void,
ty,
*index as u64,
);
}
Value::Bool(v) => {
f.set_constant_value_at_index(
v as *const bool as *const c_void,
ty,
*index as u64,
);
}
}
}
f
}
}
#[allow(clippy::too_many_arguments)]
pub fn call_gemm(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
(b, m, n, k): (usize, usize, usize, usize),
lhs_stride: &[usize],
lhs_offset: usize,
lhs_buffer: &Buffer,
rhs_stride: &[usize],
rhs_offset: usize,
rhs_buffer: &Buffer,
output: &Buffer,
) -> Result<(), MetalKernelError> {
assert!(rhs_stride.len() >= 2);
assert!(lhs_stride.len() >= 2);
let rhs_m1 = rhs_stride[rhs_stride.len() - 1];
let rhs_m2 = rhs_stride[rhs_stride.len() - 2];
let lhs_m1 = lhs_stride[lhs_stride.len() - 1];
let lhs_m2 = lhs_stride[lhs_stride.len() - 2];
// lhs has shape b, m, k
// We also allow for the case where the stride on the minor dimension is not as expected but
// there is a single element.
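    // e.g. a contiguous (b, m, k) lhs has strides (m*k, k, 1): lhs_m1 == 1 and
    // lhs_m2 == k, so a_trans is false; a transposed view with strides (.., 1, m)
    // falls into the second branch instead.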
let a_trans = if (lhs_m1 == 1 || k == 1) && (lhs_m2 == k || m == 1) {
false
} else if (lhs_m1 == m || k == 1) && (lhs_m2 == 1 || m == 1) {
true
} else {
return Err(MetalKernelError::MatMulNonContiguous {
lhs_stride: lhs_stride.to_vec(),
rhs_stride: rhs_stride.to_vec(),
mnk: (m, n, k),
})?;
};
// rhs has shape b, k, n
let b_trans = if (rhs_m1 == 1 || n == 1) && (rhs_m2 == n || k == 1) {
false
} else if (rhs_m1 == k || n == 1) && (rhs_m2 == 1 || k == 1) {
true
} else {
return Err(MetalKernelError::MatMulNonContiguous {
lhs_stride: lhs_stride.to_vec(),
rhs_stride: rhs_stride.to_vec(),
mnk: (m, n, k),
})?;
};
let d_trans = false;
let alpha = 1.0f32;
let beta = 0.0f32;
let batched = b > 1;
let fused_activation = false;
let fused_bias = false;
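    // Tile selection: m == 1 is effectively a matrix-vector product, so a small 8x8
    // tile with a deeper k reduction (k_simd = 64) is used; larger matrices get a
    // 40x40 tile that better amortizes the loads.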
let (m_simd, n_simd, k_simd, m_splits, n_splits) = if m == 1 {
let m_simd = 8;
let n_simd = 8;
let k_simd = 64;
let m_splits = 1;
let n_splits = 1;
(m_simd, n_simd, k_simd, m_splits, n_splits)
} else {
let m_simd = 40;
let n_simd = 40;
let k_simd = 32;
let m_splits = 1;
let n_splits = 1;
(m_simd, n_simd, k_simd, m_splits, n_splits)
};
let constants = Some(ConstantValues::new(vec![
(0, Value::USize(m)),
(1, Value::USize(n)),
(2, Value::USize(k)),
(10, Value::Bool(a_trans)),
(11, Value::Bool(b_trans)),
(13, Value::Bool(d_trans)),
(20, Value::F32(alpha)),
(21, Value::F32(beta)),
(100, Value::Bool(batched)),
(101, Value::Bool(fused_activation)),
// Garbage
(102, Value::Bool(false)),
(103, Value::Bool(false)),
(113, Value::Bool(false)),
(50_000, Value::Bool(false)),
// End garbage
(200, Value::U16(m_simd)),
(201, Value::U16(n_simd)),
(202, Value::U16(k_simd)),
(210, Value::U16(m_splits)),
(211, Value::U16(n_splits)),
(50_001, Value::Bool(fused_bias)),
]));
let pipeline = kernels.load_pipeline_with_constants(device, Source::Mfa, name, constants)?;
let m_group = m_simd * m_splits;
let n_group = n_simd * n_splits;
let a_block_length = m_group * k_simd;
let b_block_length = k_simd * n_group;
let mut block_elements = a_block_length + b_block_length;
if (m % 8 != 0) && (n % 8 != 0) {
let c_block_length = m_group * n_group;
block_elements = std::cmp::max(c_block_length, block_elements)
}
if fused_bias {
if d_trans {
block_elements = std::cmp::max(block_elements, m_group);
} else {
block_elements = std::cmp::max(block_elements, n_group);
}
}
let bytes = match name {
"sgemm" => 4,
"hgemm" => 2,
"bgemm" => 2,
other => {
return Err(MetalKernelError::LoadLibraryError(format!(
"{other} is not a valid kernel for gemm"
)));
}
};
let block_bytes = block_elements * bytes;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
encoder.set_threadgroup_memory_length(0, block_bytes.into());
encoder.set_buffer(0, Some(lhs_buffer), lhs_offset as NSUInteger);
encoder.set_buffer(1, Some(rhs_buffer), rhs_offset as NSUInteger);
encoder.set_buffer(2, Some(output), 0);
// TODO Tensor D
let grid_z = b;
if batched {
let byte_stride_a: usize = lhs_stride[lhs_stride.len() - 3] * bytes as usize;
let byte_stride_b: usize = rhs_stride[rhs_stride.len() - 3] * bytes as usize;
let byte_stride_c = m * n * bytes as usize;
// TODO byte_stride_d
let byte_stride_d = 0;
let buffer: Vec<u64> = vec![
byte_stride_a as _,
byte_stride_b as _,
byte_stride_c as _,
byte_stride_d as _,
];
encoder.set_bytes(
10,
(buffer.len() * core::mem::size_of::<u64>()) as NSUInteger,
buffer.as_ptr() as *const NSUInteger as *const c_void,
);
}
let grid_size = MTLSize {
width: divide(n, n_group.into()),
height: divide(m, m_group.into()),
depth: grid_z as NSUInteger,
};
let group_size = MTLSize {
width: 32 * (m_splits as u64) * (n_splits as u64),
height: 1,
depth: 1,
};
encoder.use_resource(lhs_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(rhs_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(grid_size, group_size);
Ok(())
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum SdpaDType {
BF16,
F16,
F32,
}
/// SDPA full is supported when:
/// - q head dim == 32, 64, 96, 128, 256
/// - no mask
/// - q heads == kv heads
/// - final type != bf16 (TODO maybe just template this kernel too?)
/// - q,k,v are contiguous
#[allow(clippy::too_many_arguments)]
pub fn call_sdpa_full(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
q_offset: usize,
q_shape: &[usize],
q_buffer: &Buffer,
k_offset: usize,
k_buffer: &Buffer,
v_offset: usize,
v_buffer: &Buffer,
output: &Buffer,
alpha: f32,
softcapping: f32,
itype: SdpaDType,
) -> Result<(), MetalKernelError> {
#[derive(Debug)]
#[repr(C)]
struct MLXFastAttentionParams {
m: i32,
n: i32,
k: i32,
ldq: i32, // ldq == ldo
ldk: i32,
ldv: i32,
lds: i32,
ldo: i32,
tiles_n: i32,
tiles_m: i32,
batch_stride_q: i32,
batch_stride_k: i32,
batch_stride_v: i32,
batch_stride_o: i32,
swizzle_log: i32,
gemm_n_iterations_aligned: i32,
gemm_k_iterations_aligned: i32,
gemm_sv_m_block_iterations: i32,
batch_ndim: i32,
alpha: f32,
softcapping: f32,
}
let bk = q_shape.last().unwrap();
const BN: usize = 16;
const BM: usize = 16;
const WM: usize = 2;
const WN: usize = 2;
let name = match (bk, itype) {
(32, SdpaDType::F16) => "steel_gemm_attention_bm_16_bn_16_bk_32_itype_half",
(64, SdpaDType::F16) => "steel_gemm_attention_bm_16_bn_16_bk_64_itype_half",
(96, SdpaDType::F16) => "steel_gemm_attention_bm_16_bn_16_bk_96_itype_half",
(128, SdpaDType::F16) => "steel_gemm_attention_bm_16_bn_16_bk_128_itype_half",
(256, SdpaDType::F16) => "steel_gemm_attention_bm_16_bn_16_bk_256_itype_half",
(32, SdpaDType::F32) => "steel_gemm_attention_bm_16_bn_16_bk_32_itype_float",
(64, SdpaDType::F32) => "steel_gemm_attention_bm_16_bn_16_bk_64_itype_float",
(96, SdpaDType::F32) => "steel_gemm_attention_bm_16_bn_16_bk_96_itype_float",
(128, SdpaDType::F32) => "steel_gemm_attention_bm_16_bn_16_bk_128_itype_float",
(256, SdpaDType::F32) => "steel_gemm_attention_bm_16_bn_16_bk_256_itype_float",
(other, SdpaDType::F16 | SdpaDType::F32) => {
return Err(MetalKernelError::SdpaHeadSizeMismatch {
variation: "full",
got: *other,
expected: vec![32, 64, 96, 128, 256],
})
}
(_, SdpaDType::BF16) => {
return Err(MetalKernelError::SdpaHeadDTypeMismatch {
variation: "full",
got: SdpaDType::BF16,
})
}
};
let pipeline = kernels.load_pipeline(device, Source::Sdpa, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
// q = (bs, qhead, seq, hidden)
// k/v = (bs, kv_head, seq, hidden)
let qseq = q_shape[q_shape.len() - 2];
let m = q_shape[q_shape.len() - 2];
let n = m;
let k = q_shape[q_shape.len() - 1];
let bs_out = q_shape[0] * q_shape[1];
let batch_shape = [q_shape[0] * q_shape[1]];
let dk = q_shape[q_shape.len() - 1];
let ldq = dk;
let ldk = dk;
let ldv = dk;
let lds = BN;
let ldo = dk;
let tn = 1;
let tm = m.div_ceil(BM);
let b_stride_q = dk * qseq;
let b_stride_k = dk * qseq;
let b_stride_v = dk * qseq;
let b_stride_o = dk * qseq;
let swizzle_log = 0;
let gemm_n_iterations_aligned = n.div_ceil(BN);
let gemm_k_iterations_aligned = k.div_ceil(*bk);
let gemm_sv_m_block_iterations = m.div_ceil(BM);
let batch_ndim = batch_shape.len();
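    // Softcapping computes softcap * tanh(logits / softcap); pre-dividing alpha by the
    // softcap folds that division into the existing q/k scaling.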
let alpha = if softcapping != 1. {
alpha / softcapping
} else {
alpha
};
let params = MLXFastAttentionParams {
m: m as i32,
n: n as i32,
k: k as i32,
ldq: ldq as i32,
ldk: ldk as i32,
ldv: ldv as i32,
lds: lds as i32,
ldo: ldo as i32,
tiles_n: tn,
tiles_m: tm as i32,
batch_stride_q: b_stride_q as i32,
batch_stride_k: b_stride_k as i32,
batch_stride_v: b_stride_v as i32,
batch_stride_o: b_stride_o as i32,
swizzle_log,
gemm_n_iterations_aligned: gemm_n_iterations_aligned as i32,
gemm_k_iterations_aligned: gemm_k_iterations_aligned as i32,
gemm_sv_m_block_iterations: gemm_sv_m_block_iterations as i32,
batch_ndim: batch_ndim as i32,
alpha,
softcapping,
};
let batch_strides = [b_stride_q, b_stride_k, b_stride_v, b_stride_o];
impl EncoderParam for MLXFastAttentionParams {
fn set_param(encoder: &ComputeCommandEncoderRef, position: u64, data: Self) {
encoder.set_bytes(
position,
core::mem::size_of::<MLXFastAttentionParams>() as u64,
&data as *const MLXFastAttentionParams as *const c_void,
);
}
}
set_params!(
encoder,
(
(q_buffer, q_offset),
(k_buffer, k_offset),
(v_buffer, v_offset),
output,
params,
&batch_shape[..],
&batch_strides[..]
)
);
let grid_dims = MTLSize {
width: 1,
height: tm as u64,
depth: bs_out as u64,
};
let group_dims = MTLSize {
width: 32,
height: WM as u64,
depth: WN as u64,
};
encoder.use_resource(q_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(k_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(v_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(grid_dims, group_dims);
Ok(())
}
/// SDPA vector is supported when:
/// - q head dim == 32, 64, 96, 128, 256
/// - no mask
/// - q,k,v are contiguous
#[allow(clippy::too_many_arguments)]
pub fn call_sdpa_vector(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
q_offset: usize,
q_shape: &[usize],
q_buffer: &Buffer,
k_offset: usize,
k_shape: &[usize],
k_stride: &[usize],
k_buffer: &Buffer,
v_offset: usize,
v_stride: &[usize],
v_buffer: &Buffer,
output: &Buffer,
alpha: f32,
softcapping: f32,
itype: SdpaDType,
) -> Result<(), MetalKernelError> {
let bk = q_shape.last().unwrap();
let gqa_factor = (q_shape[1] / k_shape[1]) as i32;
let n = k_shape[2] as i32;
let b = (q_shape[0] * q_shape[1]) as i32;
let kstride = k_stride[1];
let vstride = v_stride[1];
let name = match (bk, itype) {
(32, SdpaDType::F16) => "sdpa_vector_float16_t_32",
(64, SdpaDType::F16) => "sdpa_vector_float16_t_64",
(96, SdpaDType::F16) => "sdpa_vector_float16_t_96",
(128, SdpaDType::F16) => "sdpa_vector_float16_t_128",
(256, SdpaDType::F16) => "sdpa_vector_float16_t_256",
(32, SdpaDType::BF16) => "sdpa_vector_bfloat16_t_32",
(64, SdpaDType::BF16) => "sdpa_vector_bfloat16_t_64",
(96, SdpaDType::BF16) => "sdpa_vector_bfloat16_t_96",
(128, SdpaDType::BF16) => "sdpa_vector_bfloat16_t_128",
(256, SdpaDType::BF16) => "sdpa_vector_bfloat16_t_256",
(32, SdpaDType::F32) => "sdpa_vector_float_32",
(64, SdpaDType::F32) => "sdpa_vector_float_64",
(96, SdpaDType::F32) => "sdpa_vector_float_96",
(128, SdpaDType::F32) => "sdpa_vector_float_128",
(256, SdpaDType::F32) => "sdpa_vector_float_256",
(other, _) => {
return Err(MetalKernelError::SdpaHeadSizeMismatch {
variation: "vector",
got: *other,
expected: vec![32, 64, 96, 128, 256],
})
}
};
let alpha = if softcapping != 1. {
alpha / softcapping
} else {
alpha
};
let pipeline = kernels.load_pipeline(device, Source::Sdpa, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
// q = (bs, qhead, seq, hidden)
// k/v = (bs, kv_head, kv_seq, hidden)
set_params!(
encoder,
(
(q_buffer, q_offset),
(k_buffer, k_offset),
(v_buffer, v_offset),
output,
gqa_factor,
n,
kstride,
vstride,
alpha,
softcapping
)
);
let grid_dims = MTLSize {
width: 1,
height: b as u64,
depth: 1_u64,
};
let group_dims = MTLSize {
width: 1024,
height: 1,
depth: 1,
};
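    // One 1024-thread threadgroup per (batch * head) row: each group reduces over the
    // full kv sequence for its single query vector.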
encoder.use_resource(q_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(k_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(v_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(grid_dims, group_dims);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_im2col1d_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
strides: &[usize],
(k_size, stride, padding, dilation): (usize, usize, usize, usize),
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Conv, name)?;
let l_out = (shape[2] + 2 * padding - dilation * (k_size - 1) - 1) / stride + 1;
let dst_el = shape[0] * l_out * shape[1] * k_size;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(dst_el, l_out, k_size, stride, padding, dilation, shape, strides, &input, output)
);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_col2im1d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
k_size: usize,
stride: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Conv, name)?;
let l_in = shape[1];
let c_out = shape[2];
let l_out = (l_in - 1) * stride + k_size;
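    // Transposed-conv output length with no padding or dilation: each of the l_in
    // input positions starts a k_size window at i * stride.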
let dst_el = shape[0] * c_out * l_out;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(dst_el, l_out, l_in, c_out, k_size, stride, &input, output)
);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_im2col_strided(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
strides: &[usize],
(h_k, w_k, stride, padding, dilation): (usize, usize, usize, usize, usize),
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Conv, name)?;
let h = shape[2];
let w = shape[3];
let h_out = (h + 2 * padding - dilation * (h_k - 1) - 1) / stride + 1;
let w_out = (w + 2 * padding - dilation * (w_k - 1) - 1) / stride + 1;
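    // Standard convolution output size: out = (in + 2*padding - dilation*(k-1) - 1) / stride + 1,
    // applied independently to the height and width axes.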
let dst_el = shape[0] * h_out * w_out * shape[1] * h_k * w_k;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
dst_el, h_out, w_out, h_k, w_k, stride, padding, dilation, shape, strides, &input,
output
)
);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_upsample_nearest_2d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
strides: &[usize],
out_w: usize,
out_h: usize,
input: BufferOffset,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Conv, name)?;
let dst_el = out_w * out_h * shape[0] * shape[1];
let scale_w = shape[2] as f32 / out_w as f32;
let scale_h = shape[3] as f32 / out_h as f32;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(out_w, out_h, scale_w, scale_h, shape, strides, &input, output)
);
encoder.use_resource(input.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_random_uniform(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
min: f32,
max: f32,
length: usize,
seed: &Buffer,
buffer: &Buffer,
) -> Result<(), MetalKernelError> {
if min >= max {
return Err(MetalKernelError::LoadLibraryError(
"min must be less than max".to_string(),
));
}
let pipeline = kernels.load_pipeline(device, Source::Random, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let odd = (length % 2 != 0) as usize;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length / 2 + odd);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, min, max, seed, buffer));
encoder.use_resource(
seed,
metal::MTLResourceUsage::Read | metal::MTLResourceUsage::Write,
);
encoder.use_resource(buffer, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_random_normal(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
mean: f32,
stddev: f32,
length: usize,
seed: &Buffer,
buffer: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Random, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
let odd = (length % 2 != 0) as usize;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length / 2 + odd);
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (length, mean, stddev, seed, buffer));
encoder.use_resource(
seed,
metal::MTLResourceUsage::Read | metal::MTLResourceUsage::Write,
);
encoder.use_resource(buffer, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[derive(Debug, Clone, Copy)]
pub enum GgmlDType {
Q4_0,
Q4_1,
Q5_0,
Q5_1,
Q8_0,
Q8_1,
Q2K,
Q3K,
Q4K,
Q5K,
Q6K,
Q8K,
F16,
F32,
}
#[allow(clippy::too_many_arguments)]
pub fn call_quantized_matmul_mv_t(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
dtype: GgmlDType,
(b, m, n, k): (usize, usize, usize, usize),
lhs: &Buffer,
lhs_offset: usize,
rhs: &Buffer,
dst_offset: usize,
dst: &Buffer,
) -> Result<(), MetalKernelError> {
    // Everything is in reverse: the ne*/nb* values follow the ggml convention of
    // listing dimensions from the innermost one outwards.
let ne00 = k as i64;
let ne01 = n as i64;
let ne02 = b as i64;
let ne03 = 1i64;
let nb00 = 0i64;
let nb01 = 0i64;
let nb02 = 0i64;
let ne10 = k as i64;
let ne11 = m as i64;
let ne12 = b as i64;
let ne13 = 1i64;
let nb10 = 0i64;
let nb11 = 0i64;
let nb12 = 0i64;
let ne0 = n as i64;
let ne1 = m as i64;
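    // r2/r3 are the broadcast ratios between the activation (src1) and weight (src0)
    // batch dimensions, matching the ggml kernels.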
let r2: u32 = (ne12 / ne02) as u32;
let r3: u32 = (ne13 / ne03) as u32;
let (nth0, nth1, align) = match dtype {
GgmlDType::Q4_0
| GgmlDType::Q4_1
| GgmlDType::Q5_0
| GgmlDType::Q5_1
| GgmlDType::Q8_0
| GgmlDType::Q8_1 => {
let nth0 = 8;
let nth1 = 8;
let align = 8;
(nth0, nth1, align)
}
GgmlDType::Q2K => {
// Fixing a bug in Metal for GGML
// https://github.com/ggerganov/llama.cpp/blob/b8109bc0139f15a5b321909f47510b89dca47ffc/ggml-metal.m#L1576
let nth0 = 2;
let nth1 = 32;
let align = 4;
(nth0, nth1, align)
}
GgmlDType::Q4K => {
let nth0 = 4;
let nth1 = 8;
let align = 4;
(nth0, nth1, align)
}
GgmlDType::Q3K | GgmlDType::Q5K => {
let nth0 = 2;
let nth1 = 32;
let align = 4;
(nth0, nth1, align)
}
GgmlDType::Q6K => {
let nth0 = 2;
let nth1 = 32;
let align = 2;
(nth0, nth1, align)
}
GgmlDType::F16 | GgmlDType::Q8K => {
            // The original implementation works row by row.
let nth0 = 32;
let nth1 = 1;
let align = 8;
(nth0, nth1, align)
}
GgmlDType::F32 => {
let nth0 = 32;
let nth1 = 1;
let align = 8;
(nth0, nth1, align)
}
};
let thread_groups_count = MTLSize {
width: divide(ne01 as usize, align),
height: ne11 as u64,
depth: (ne12 * ne13) as u64,
};
let threads_per_threadgroup = MTLSize {
width: nth0,
height: nth1,
depth: 1,
};
let name = match dtype {
GgmlDType::Q4_0 => "kernel_mul_mv_q4_0_f32",
GgmlDType::Q4_1 => "kernel_mul_mv_q4_1_f32",
GgmlDType::Q5_0 => "kernel_mul_mv_q5_0_f32",
GgmlDType::Q5_1 => "kernel_mul_mv_q5_1_f32",
GgmlDType::Q8_0 => "kernel_mul_mv_q8_0_f32",
GgmlDType::Q8_1 => "kernel_mul_mv_q8_1_f32",
GgmlDType::Q2K => "kernel_mul_mv_q2_K_f32",
GgmlDType::Q3K => "kernel_mul_mv_q3_K_f32",
GgmlDType::Q4K => "kernel_mul_mv_q4_K_f32",
GgmlDType::Q5K => "kernel_mul_mv_q5_K_f32",
GgmlDType::Q6K => "kernel_mul_mv_q6_K_f32",
GgmlDType::Q8K => "kernel_mul_mv_q8_K_f32",
GgmlDType::F16 => "kernel_mul_mv_f16_f32",
GgmlDType::F32 => "kernel_mul_mv_f32_f32",
};
let pipeline = kernels.load_pipeline(device, Source::Quantized, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
rhs,
(lhs, lhs_offset),
(dst, dst_offset),
ne00,
ne01,
ne02,
nb00,
nb01,
nb02,
ne10,
ne11,
ne12,
nb10,
nb11,
nb12,
ne0,
ne1,
r2,
r3
)
);
encoder.use_resource(lhs, metal::MTLResourceUsage::Read);
encoder.use_resource(rhs, metal::MTLResourceUsage::Read);
encoder.use_resource(dst, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_groups_count, threads_per_threadgroup);
Ok(())
}
fn divide(m: usize, b: usize) -> NSUInteger {
m.div_ceil(b) as NSUInteger
}
#[allow(clippy::too_many_arguments)]
pub fn call_pool2d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
shape: &[usize],
strides: &[usize],
out_w: usize,
out_h: usize,
w_k: usize,
h_k: usize,
w_stride: usize,
h_stride: usize,
input: &Buffer,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let dst_el = out_w * out_h * shape[0] * shape[1];
let pipeline: ComputePipelineState = kernels.load_pipeline(device, Source::Conv, name)?;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(w_k, h_k, w_stride, h_stride, shape, strides, input, output)
);
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_conv_transpose1d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
dilation: usize,
stride: usize,
padding: usize,
out_padding: usize,
c_out: usize,
l_out: usize,
b_size: usize,
src_shape: &[usize],
src_strides: &[usize],
kernel_shape: &[usize],
kernel_strides: &[usize],
input: &Buffer,
input_offset: usize,
kernel: &Buffer,
kernel_offset: usize,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let dst_el = c_out * l_out * b_size;
let pipeline: ComputePipelineState = kernels.load_pipeline(device, Source::Conv, name)?;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
l_out,
stride,
padding,
out_padding,
dilation,
src_shape,
src_strides,
kernel_shape,
kernel_strides,
(input, input_offset),
(kernel, kernel_offset),
output
)
);
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(kernel, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
pub struct CallConvTranspose2dCfg<'a> {
pub dilation: usize,
pub stride: usize,
pub padding: usize,
pub output_padding: usize,
pub c_out: usize,
pub out_w: usize,
pub out_h: usize,
pub b_size: usize,
pub input_dims: &'a [usize],
pub input_stride: &'a [usize],
pub kernel_dims: &'a [usize],
pub kernel_stride: &'a [usize],
pub input_offset: usize,
pub kernel_offset: usize,
}
#[allow(clippy::too_many_arguments)]
pub fn call_conv_transpose2d(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
cfg: CallConvTranspose2dCfg,
input: &Buffer,
kernel: &Buffer,
output: &Buffer,
) -> Result<(), MetalKernelError> {
let dst_el = cfg.c_out * cfg.out_w * cfg.out_h * cfg.b_size;
let pipeline: ComputePipelineState = kernels.load_pipeline(device, Source::Conv, name)?;
let (thread_group_count, thread_group_size) = linear_split(&pipeline, dst_el);
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(
encoder,
(
cfg.out_w,
cfg.out_h,
cfg.stride,
cfg.padding,
cfg.output_padding,
cfg.dilation,
cfg.input_dims,
cfg.input_stride,
cfg.kernel_dims,
cfg.kernel_stride,
(input, cfg.input_offset),
(kernel, cfg.kernel_offset),
output
)
);
encoder.use_resource(input, metal::MTLResourceUsage::Read);
encoder.use_resource(kernel, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub fn call_arg_sort(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
nrows: usize,
ncols: usize,
ncols_pad: usize,
src: BufferOffset,
dst: &Buffer,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Sort, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (&src, dst, ncols as i64, ncols_pad as i64));
let thread_group_count = MTLSize {
width: 1,
height: nrows as u64,
depth: 1,
};
let thread_group_size = MTLSize {
width: ncols_pad as u64,
height: 1,
depth: 1,
};
encoder.use_resource(src.buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(dst, metal::MTLResourceUsage::Write);
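    // The sort keeps one padded row of u32 indices in threadgroup memory (4 bytes per
    // element); the .max(16) keeps the allocation at a sane minimum for very small rows.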
encoder.set_threadgroup_memory_length(0, (ncols_pad * 4).max(16) as u64);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum GemmDType {
BF16,
F16,
F32,
}
#[allow(clippy::too_many_arguments)]
pub fn call_mlx_gemm(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
dtype: GemmDType,
(b, m, n, k): (usize, usize, usize, usize),
lhs_stride: &[usize],
lhs_offset: usize,
lhs_buffer: &Buffer,
rhs_stride: &[usize],
rhs_offset: usize,
rhs_buffer: &Buffer,
output: &Buffer,
) -> Result<(), MetalKernelError> {
#[derive(Debug)]
#[repr(C)]
struct GemmParams {
m: i32,
n: i32,
k: i32,
lda: i32,
ldb: i32,
ldd: i32,
tiles_n: i32,
tiles_m: i32,
batch_stride_a: isize,
batch_stride_b: isize,
batch_stride_d: isize,
swizzle_log: i32,
gemm_k_iterations_aligned: i32,
batch_ndim: i32,
}
assert!(rhs_stride.len() >= 2);
assert!(lhs_stride.len() >= 2);
let rhs_m1 = rhs_stride[rhs_stride.len() - 1];
let rhs_m2 = rhs_stride[rhs_stride.len() - 2];
let lhs_m1 = lhs_stride[lhs_stride.len() - 1];
let lhs_m2 = lhs_stride[lhs_stride.len() - 2];
// lhs has shape b, m, k
// We also allow for the case where the stride on the minor dimension is not as expected but
// there is a single element.
let (lda, a_trans) = if (lhs_m1 == 1 || k == 1) && (lhs_m2 == k || m == 1) {
(k as i32, false)
} else if (lhs_m1 == m || k == 1) && (lhs_m2 == 1 || m == 1) {
(m as i32, true)
} else {
return Err(MetalKernelError::MatMulNonContiguous {
lhs_stride: lhs_stride.to_vec(),
rhs_stride: rhs_stride.to_vec(),
mnk: (m, n, k),
})?;
};
// rhs has shape b, k, n
let (ldb, b_trans) = if (rhs_m1 == 1 || n == 1) && (rhs_m2 == n || k == 1) {
(n as i32, false)
} else if (rhs_m1 == k || n == 1) && (rhs_m2 == 1 || k == 1) {
(k as i32, true)
} else {
return Err(MetalKernelError::MatMulNonContiguous {
lhs_stride: lhs_stride.to_vec(),
rhs_stride: rhs_stride.to_vec(),
mnk: (m, n, k),
})?;
};
let (bm, bn, bk, wn, wm) = (32, 32, 16, 2, 2);
// https://github.com/ml-explore/mlx/blob/02efb310cac667bc547d1b96f21596c221f84fe7/mlx/backend/metal/matmul.cpp#L422
let constants = Some(ConstantValues::new(vec![
(10, Value::Bool(/* has_batch */ b > 1)),
(100, Value::Bool(/* use_out_source */ false)),
(110, Value::Bool(/* do_axpby */ false)),
(200, Value::Bool(/* align_m */ m % bm == 0)),
(201, Value::Bool(/* align_n */ n % bn == 0)),
(202, Value::Bool(/* align_k */ k % bk == 0)),
(300, Value::Bool(/* do_gather */ false)),
]));
let swizzle_log = 0;
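    // With swizzle_log = 0, tile == 1 and the multiply/div_ceil pair below leaves
    // tn/tm unchanged; non-zero values would swizzle the tile order for better cache
    // reuse.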
let tile = 1 << swizzle_log;
let tn = n.div_ceil(bn);
let tm = m.div_ceil(bm);
let tn = tn * tile;
let tm = tm.div_ceil(tile);
let batch_stride_a = if lhs_stride.len() > 2 {
lhs_stride[lhs_stride.len() - 3]
} else {
m * k
};
let batch_stride_b = if rhs_stride.len() > 2 {
rhs_stride[rhs_stride.len() - 3]
} else {
n * k
};
let gemm_params = GemmParams {
m: m as i32,
n: n as i32,
k: k as i32,
lda,
ldb,
ldd: n as i32,
tiles_n: tn as i32,
tiles_m: tm as i32,
swizzle_log,
batch_stride_a: batch_stride_a as isize,
batch_stride_b: batch_stride_b as isize,
batch_stride_d: (m * n) as isize,
batch_ndim: 1i32,
gemm_k_iterations_aligned: (k / bk) as i32,
};
let batch_strides = [gemm_params.batch_stride_a, gemm_params.batch_stride_b];
// TODO(laurent): generate the name
// template [[host_name("gemm_" #tname "_" #iname "_" #oname "_bm" #bm "_bn" #bn "_bk" #bk "_wm" #wm "_wn" #wn)]]
let name = match (dtype, a_trans, b_trans) {
(GemmDType::F32, false, false) => "gemm_nn_f32_f32_32_32_16_2_2",
(GemmDType::F32, true, false) => "gemm_tn_f32_f32_32_32_16_2_2",
(GemmDType::F32, false, true) => "gemm_nt_f32_f32_32_32_16_2_2",
(GemmDType::F32, true, true) => "gemm_tt_f32_f32_32_32_16_2_2",
(GemmDType::BF16, false, false) => "gemm_nn_bf16_bf16_32_32_16_2_2",
(GemmDType::BF16, true, false) => "gemm_tn_bf16_bf16_32_32_16_2_2",
(GemmDType::BF16, false, true) => "gemm_nt_bf16_bf16_32_32_16_2_2",
(GemmDType::BF16, true, true) => "gemm_tt_bf16_bf16_32_32_16_2_2",
(GemmDType::F16, false, false) => "gemm_nn_f16_f16_32_32_16_2_2",
(GemmDType::F16, true, false) => "gemm_tn_f16_f16_32_32_16_2_2",
(GemmDType::F16, false, true) => "gemm_nt_f16_f16_32_32_16_2_2",
(GemmDType::F16, true, true) => "gemm_tt_f16_f16_32_32_16_2_2",
};
let pipeline = kernels.load_pipeline_with_constants(device, Source::Gemm, name, constants)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
encoder.set_buffer(0, Some(lhs_buffer), lhs_offset as NSUInteger);
encoder.set_buffer(1, Some(rhs_buffer), rhs_offset as NSUInteger);
encoder.set_buffer(3, Some(output), 0);
encoder.set_bytes(
4,
std::mem::size_of::<GemmParams>() as u64,
&gemm_params as *const GemmParams as *const c_void,
);
encoder.set_bytes(
6, // batch_shape
std::mem::size_of::<i32>() as u64,
&(b as i32) as *const i32 as *const c_void,
);
encoder.set_bytes(
7,
(std::mem::size_of::<isize>() * batch_strides.len()) as u64,
batch_strides.as_ptr() as *const c_void,
);
let grid_size = MTLSize {
width: tn as u64,
height: tm as u64,
depth: /* batch_size_out */ b as u64,
};
let group_size = MTLSize {
width: 32,
height: wn,
depth: wm,
};
encoder.use_resource(lhs_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(rhs_buffer, metal::MTLResourceUsage::Read);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(grid_size, group_size);
Ok(())
}
pub fn call_const_fill(
device: &Device,
ep: impl EncoderProvider,
kernels: &Kernels,
name: &'static str,
length: usize,
output: &Buffer,
v: f32,
) -> Result<(), MetalKernelError> {
let pipeline = kernels.load_pipeline(device, Source::Fill, name)?;
let encoder = ep.encoder();
let encoder: &ComputeCommandEncoderRef = encoder.as_ref();
encoder.set_compute_pipeline_state(&pipeline);
set_params!(encoder, (output, v, length));
let (thread_group_count, thread_group_size) = linear_split(&pipeline, length);
encoder.use_resource(output, metal::MTLResourceUsage::Write);
encoder.dispatch_thread_groups(thread_group_count, thread_group_size);
Ok(())
}
#[cfg(test)]
mod tests;
| 9 |
0 | hf_public_repos/candle | hf_public_repos/candle/candle-pyo3/test.py | import candle
print(f"mkl: {candle.utils.has_mkl()}")
print(f"accelerate: {candle.utils.has_accelerate()}")
print(f"num-threads: {candle.utils.get_num_threads()}")
print(f"cuda: {candle.utils.cuda_is_available()}")
t = candle.Tensor(42.0)
print(t)
print(t.shape, t.rank, t.device)
print(t + t)
t = candle.Tensor([3.0, 1, 4, 1, 5, 9, 2, 6])
print(t)
print(t + t)
t = t.reshape([2, 4])
print(t.matmul(t.t()))
print(t.to_dtype(candle.u8))
print(t.to_dtype("u8"))
t = candle.randn((5, 3))
print(t)
print(t.dtype)
t = candle.randn((16, 256))
quant_t = t.quantize("q6k")
dequant_t = quant_t.dequantize()
diff2 = (t - dequant_t).sqr()
print(diff2.mean_all())
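# The mean squared error printed above gives a rough measure of the q6k round-trip loss.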
| 0 |
0 | hf_public_repos/candle | hf_public_repos/candle/candle-pyo3/test_pytorch.py | import candle
import torch
# convert from candle tensor to torch tensor
t = candle.randn((3, 512, 512))
torch_tensor = t.to_torch()
print(torch_tensor)
print(type(torch_tensor))
# convert from torch tensor to candle tensor
t = torch.randn((3, 512, 512))
candle_tensor = candle.Tensor(t)
print(candle_tensor)
print(type(candle_tensor))
| 1 |
0 | hf_public_repos/candle | hf_public_repos/candle/candle-pyo3/pyproject.toml | [project]
name = 'candle-nn'
requires-python = '>=3.7'
authors = [
{name = 'The Candle Team'},
]
dynamic = [
'description',
'license',
'readme',
]
[project.urls]
Homepage = 'https://github.com/huggingface/candle'
Source = 'https://github.com/huggingface/candle'
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
python-source = "py_src"
module-name = "candle.candle"
bindings = 'pyo3'
features = ["pyo3/extension-module"]
[tool.black]
line-length = 119
target-version = ['py35']
[project.optional-dependencies]
testing = ["pytest", "black==22.3"]
huggingface = ["transformers>=4.33.3", "huggingface-hub>=0.17.3"] | 2 |
0 | hf_public_repos/candle | hf_public_repos/candle/candle-pyo3/README.md | ## Installation
From the `candle-pyo3` directory, activate a virtual env where you want the
candle package to be installed, then run:
```bash
maturin develop -r
python test.py
```
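Once built, the bindings can be used directly from Python. A minimal sketch mirroring `test.py` (tensor creation, transpose and matmul):
```python
import candle

t = candle.randn((2, 4))
print(t.matmul(t.t()))
```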
## Generating Stub Files for Type Hinting
For type hinting support, the `candle-pyo3` package requires `*.pyi` files. You can automatically generate these files using the `stub.py` script.
### Steps:
1. Install the package using `maturin`.
2. Generate the stub files by running:
```
python stub.py
```
### Validation:
To ensure that the stub files match the current implementation, execute:
```
python stub.py --check
```
| 3 |
0 | hf_public_repos/candle | hf_public_repos/candle/candle-pyo3/quant-llama.py | # This example shows how the candle Python api can be used to replicate llama.cpp.
import sys
from typing import Dict, Tuple, Any
import candle
from candle.models.llama import QuantizedLlama
from candle import utils
MAX_SEQ_LEN = 4096
def gguf_rename(tensor_name: str):
if tensor_name == "token_embd.weight":
return "tok_embeddings.weight"
if tensor_name == "output_norm.weight":
return "norm.weight"
tensor_name = tensor_name.replace("blk.", "layers.")
tensor_name = tensor_name.replace(".attn_q.", ".attention.wq.")
tensor_name = tensor_name.replace(".attn_k.", ".attention.wk.")
tensor_name = tensor_name.replace(".attn_v.", ".attention.wv.")
tensor_name = tensor_name.replace(".attn_output.", ".attention.wo.")
tensor_name = tensor_name.replace(".ffn_gate.", ".feed_forward.w1.")
tensor_name = tensor_name.replace(".ffn_down.", ".feed_forward.w2.")
tensor_name = tensor_name.replace(".ffn_up.", ".feed_forward.w3.")
tensor_name = tensor_name.replace(".attn_norm.", ".attention_norm.")
return tensor_name
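# The GGUF tensor names ("blk.N.attn_q.weight") are remapped above to the llama.cpp-style
# names ("layers.N.attention.wq.weight") that QuantizedLlama expects.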
def main():
if len(sys.argv) < 2:
raise ValueError("missing weight file argument")
filename = sys.argv[1]
print(f"reading model file {filename}")
if filename.endswith("gguf"):
all_tensors, metadata = utils.load_gguf(filename)
vocab = metadata["tokenizer.ggml.tokens"]
for i, v in enumerate(vocab):
vocab[i] = "\n" if v == "<0x0A>" else v.replace("▁", " ")
hparams = {k: v for (k, v) in metadata.items() if not k.startswith("tokenizer")}
print(hparams)
hparams = {
"n_vocab": len(vocab),
"n_embd": metadata["llama.embedding_length"],
"n_mult": 256,
"n_head": metadata["llama.attention.head_count"],
"n_head_kv": metadata["llama.attention.head_count_kv"],
"n_layer": metadata["llama.block_count"],
"n_rot": metadata["llama.rope.dimension_count"],
"rope_freq": metadata.get("llama.rope.freq_base", 10000.0),
"ftype": metadata["general.file_type"],
"context_length": metadata["llama.context_length"],
}
all_tensors = {gguf_rename(k): v for k, v in all_tensors.items()}
else:
all_tensors, hparams, vocab = utils.load_ggml(filename)
hparams["context_length"] = 2048
print(hparams)
model = QuantizedLlama(hparams, all_tensors)
print("model built, starting inference")
tokens = [1]
for token_idx in range(500):
last_token = tokens[-1]
lt = candle.tensor([last_token]).unsqueeze(0)
logits = model.forward(lt, len(tokens))
# Greedy sampling for now
# pr = candle.nn.softmax(logits, -1)
m = logits.get(0).argmax_keepdim(-1)
next_token = m.values()[0]
print(vocab[next_token], end="", flush=True)
tokens.append(next_token)
if __name__ == "__main__":
main()
| 4 |
0 | hf_public_repos/candle/candle-pyo3 | hf_public_repos/candle/candle-pyo3/src/lib.rs | #![allow(clippy::redundant_closure_call)]
use pyo3::exceptions::{PyTypeError, PyValueError};
use pyo3::prelude::*;
use pyo3::pyclass::CompareOp;
use pyo3::types::{IntoPyDict, PyDict, PyTuple};
use pyo3::ToPyObject;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use half::{bf16, f16};
#[cfg(feature = "mkl")]
extern crate intel_mkl_src;
#[cfg(feature = "accelerate")]
extern crate accelerate_src;
use ::candle::{quantized::QTensor, DType, Device, Module, Tensor, WithDType};
mod utils;
use utils::wrap_err;
mod shape;
use shape::{PyShape, PyShapeWithHole};
#[cfg(feature = "onnx")]
mod onnx;
#[derive(Clone, Debug)]
#[pyclass(name = "Tensor")]
/// A `candle` tensor.
struct PyTensor(Tensor);
impl std::ops::Deref for PyTensor {
type Target = Tensor;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[pyclass(name = "DType")]
/// A `candle` dtype.
struct PyDType(DType);
#[pymethods]
impl PyDType {
fn __repr__(&self) -> String {
format!("{:?}", self.0)
}
fn __str__(&self) -> String {
self.__repr__()
}
}
impl PyDType {
fn from_pyobject(ob: PyObject, py: Python<'_>) -> PyResult<Self> {
use std::str::FromStr;
if let Ok(dtype) = ob.extract::<String>(py) {
let dtype = DType::from_str(&dtype)
.map_err(|_| PyTypeError::new_err(format!("invalid dtype '{dtype}'")))?;
Ok(Self(dtype))
} else {
ob.extract(py)
}
}
}
static CUDA_DEVICE: std::sync::Mutex<Option<Device>> = std::sync::Mutex::new(None);
static METAL_DEVICE: std::sync::Mutex<Option<Device>> = std::sync::Mutex::new(None);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum PyDevice {
Cpu,
Cuda,
Metal,
}
impl PyDevice {
fn from_device(device: &Device) -> Self {
match device {
Device::Cpu => Self::Cpu,
Device::Cuda(_) => Self::Cuda,
Device::Metal(_) => Self::Metal,
}
}
fn as_device(&self) -> PyResult<Device> {
match self {
Self::Cpu => Ok(Device::Cpu),
Self::Cuda => {
let mut device = CUDA_DEVICE.lock().unwrap();
if let Some(device) = device.as_ref() {
return Ok(device.clone());
};
let d = Device::new_cuda(0).map_err(wrap_err)?;
*device = Some(d.clone());
Ok(d)
}
Self::Metal => {
let mut device = METAL_DEVICE.lock().unwrap();
if let Some(device) = device.as_ref() {
return Ok(device.clone());
};
let d = Device::new_metal(0).map_err(wrap_err)?;
*device = Some(d.clone());
Ok(d)
}
}
}
}
impl<'source> FromPyObject<'source> for PyDevice {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
let device: String = ob.extract()?;
let device = match device.as_str() {
"cpu" => PyDevice::Cpu,
"cuda" => PyDevice::Cuda,
_ => Err(PyTypeError::new_err(format!("invalid device '{device}'")))?,
};
Ok(device)
}
}
impl ToPyObject for PyDevice {
fn to_object(&self, py: Python<'_>) -> PyObject {
let str = match self {
PyDevice::Cpu => "cpu",
PyDevice::Cuda => "cuda",
PyDevice::Metal => "metal",
};
str.to_object(py)
}
}
trait PyWithDType: WithDType {
fn to_py(&self, py: Python<'_>) -> PyObject;
}
macro_rules! pydtype {
($ty:ty, $conv:expr) => {
impl PyWithDType for $ty {
fn to_py(&self, py: Python<'_>) -> PyObject {
$conv(*self).to_object(py)
}
}
};
}
pydtype!(i64, |v| v);
pydtype!(u8, |v| v);
pydtype!(u32, |v| v);
pydtype!(f16, f32::from);
pydtype!(bf16, f32::from);
pydtype!(f32, |v| v);
pydtype!(f64, |v| v);
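// f16/bf16 have no native Python scalar type, so they are widened to f32 before being
// handed back to the interpreter.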
fn actual_index(t: &Tensor, dim: usize, index: i64) -> ::candle::Result<usize> {
let dim = t.dim(dim)?;
if 0 <= index {
let index = index as usize;
if dim <= index {
::candle::bail!("index {index} is too large for tensor dimension {dim}")
}
Ok(index)
} else {
if (dim as i64) < -index {
::candle::bail!("index {index} is too low for tensor dimension {dim}")
}
Ok((dim as i64 + index) as usize)
}
}
fn actual_dim(t: &Tensor, dim: i64) -> ::candle::Result<usize> {
let rank = t.rank();
if 0 <= dim {
let dim = dim as usize;
if rank <= dim {
::candle::bail!("dimension index {dim} is too large for tensor rank {rank}")
}
Ok(dim)
} else {
if (rank as i64) < -dim {
::candle::bail!("dimension index {dim} is too low for tensor rank {rank}")
}
Ok((rank as i64 + dim) as usize)
}
}
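// Both helpers implement Python-style negative indexing: -1 refers to the last
// element or dimension, matching numpy/pytorch semantics.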
// TODO: Something similar to this should probably be a part of candle core.
trait MapDType {
type Output;
fn f<T: PyWithDType>(&self, t: &Tensor) -> PyResult<Self::Output>;
fn map(&self, t: &Tensor) -> PyResult<Self::Output> {
match t.dtype() {
DType::U8 => self.f::<u8>(t),
DType::U32 => self.f::<u32>(t),
DType::I64 => self.f::<i64>(t),
DType::BF16 => self.f::<bf16>(t),
DType::F16 => self.f::<f16>(t),
DType::F32 => self.f::<f32>(t),
DType::F64 => self.f::<f64>(t),
}
}
}
enum Indexer {
Index(usize),
Slice(usize, usize),
Ellipsis,
Expand,
IndexSelect(Tensor),
}
#[derive(Debug)]
struct TorchTensor(PyObject);
impl<'source> pyo3::FromPyObject<'source> for TorchTensor {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
let numpy_value: PyObject = ob.getattr("numpy")?.call0()?.extract()?;
Ok(TorchTensor(numpy_value))
}
}
#[pymethods]
impl PyTensor {
#[new]
#[pyo3(text_signature = "(self, data:_ArrayLike)")]
// TODO: Handle arbitrary input dtype and shape.
/// Creates a new tensor from a Python value. The value can be a scalar or array-like object.
fn new(py: Python<'_>, data: PyObject) -> PyResult<Self> {
use Device::Cpu;
let tensor = if let Ok(vs) = data.extract::<u32>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<i64>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<f32>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<u32>>(py) {
let len = vs.len();
Tensor::from_vec(vs, len, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<i64>>(py) {
let len = vs.len();
Tensor::from_vec(vs, len, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<f32>>(py) {
let len = vs.len();
Tensor::from_vec(vs, len, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<u32>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<i64>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<f32>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<Vec<u32>>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<Vec<i64>>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(vs) = data.extract::<Vec<Vec<Vec<f32>>>>(py) {
Tensor::new(vs, &Cpu).map_err(wrap_err)?
} else if let Ok(TorchTensor(numpy)) = data.extract::<TorchTensor>(py) {
return PyTensor::new(py, numpy);
} else {
let ty = data.bind(py).get_type();
Err(PyTypeError::new_err(format!(
"incorrect type {ty} for tensor"
)))?
};
Ok(Self(tensor))
}
/// Gets the tensor's data as a Python scalar or array-like object.
/// &RETURNS&: _ArrayLike
fn values(&self, py: Python<'_>) -> PyResult<PyObject> {
struct M<'a>(Python<'a>);
impl MapDType for M<'_> {
type Output = PyObject;
fn f<T: PyWithDType>(&self, t: &Tensor) -> PyResult<Self::Output> {
match t.rank() {
0 => Ok(t.to_scalar::<T>().map_err(wrap_err)?.to_py(self.0)),
1 => {
let v = t.to_vec1::<T>().map_err(wrap_err)?;
let v = v.iter().map(|v| v.to_py(self.0)).collect::<Vec<_>>();
Ok(v.to_object(self.0))
}
2 => {
let v = t.to_vec2::<T>().map_err(wrap_err)?;
let v = v
.iter()
.map(|v| v.iter().map(|v| v.to_py(self.0)).collect())
.collect::<Vec<Vec<_>>>();
Ok(v.to_object(self.0))
}
3 => {
let v = t.to_vec3::<T>().map_err(wrap_err)?;
let v = v
.iter()
.map(|v| {
v.iter()
.map(|v| v.iter().map(|v| v.to_py(self.0)).collect())
.collect()
})
.collect::<Vec<Vec<Vec<_>>>>();
Ok(v.to_object(self.0))
}
n => Err(PyTypeError::new_err(format!(
"TODO: conversion to PyObject is not handled for rank {n}"
)))?,
}
}
}
// TODO: Handle arbitrary shapes.
M(py).map(self)
}
/// Converts candle's tensor to pytorch's tensor
/// &RETURNS&: torch.Tensor
fn to_torch(&self, py: Python<'_>) -> PyResult<PyObject> {
let candle_values = self.values(py)?;
let torch_tensor: PyObject = py
.import_bound("torch")?
.getattr("tensor")?
.call1((candle_values,))?
.extract()?;
Ok(torch_tensor)
}
#[getter]
/// Gets the tensor's shape.
/// &RETURNS&: Tuple[int]
fn shape(&self, py: Python<'_>) -> PyObject {
PyTuple::new_bound(py, self.0.dims()).to_object(py)
}
#[getter]
/// Gets the tensor's element count.
/// &RETURNS&: int
fn nelement(&self) -> usize {
self.0.elem_count()
}
#[getter]
/// Gets the tensor's strides.
/// &RETURNS&: Tuple[int]
fn stride(&self, py: Python<'_>) -> PyObject {
PyTuple::new_bound(py, self.0.stride()).to_object(py)
}
#[getter]
/// Gets the tensor's dtype.
/// &RETURNS&: DType
fn dtype(&self) -> PyDType {
PyDType(self.0.dtype())
}
#[getter]
/// Gets the tensor's device.
/// &RETURNS&: Device
fn device(&self, py: Python<'_>) -> PyObject {
PyDevice::from_device(self.0.device()).to_object(py)
}
#[getter]
/// Gets the tensor's rank.
/// &RETURNS&: int
fn rank(&self) -> usize {
self.0.rank()
}
fn __repr__(&self) -> String {
format!("{}", self.0)
}
fn __str__(&self) -> String {
self.__repr__()
}
/// Performs the `abs` operation on the tensor.
/// &RETURNS&: Tensor
fn abs(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.abs().map_err(wrap_err)?))
}
/// Performs the `sin` operation on the tensor.
/// &RETURNS&: Tensor
fn sin(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.sin().map_err(wrap_err)?))
}
/// Performs the `cos` operation on the tensor.
/// &RETURNS&: Tensor
fn cos(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.cos().map_err(wrap_err)?))
}
/// Performs the `log` operation on the tensor.
/// &RETURNS&: Tensor
fn log(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.log().map_err(wrap_err)?))
}
/// Squares the tensor.
/// &RETURNS&: Tensor
fn sqr(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.sqr().map_err(wrap_err)?))
}
/// Calculates the square root of the tensor.
/// &RETURNS&: Tensor
fn sqrt(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.sqrt().map_err(wrap_err)?))
}
/// Get the `recip` of the tensor.
/// &RETURNS&: Tensor
fn recip(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.recip().map_err(wrap_err)?))
}
/// Performs the `exp` operation on the tensor.
/// &RETURNS&: Tensor
fn exp(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.exp().map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, p:float)")]
/// Performs the `pow` operation on the tensor with the given exponent.
/// &RETURNS&: Tensor
fn powf(&self, p: f64) -> PyResult<Self> {
Ok(PyTensor(self.0.powf(p).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor, dim:int)")]
/// Select values for the input tensor at the target indexes across the specified dimension.
///
    /// The `indexes` argument is an int tensor with a single dimension.
    /// The output has the same number of dimensions as the `self` input. The target dimension of
    /// the output has the same length as `indexes` and the values are taken from `self` using
    /// the index from `indexes`. Other dimensions have the same number of elements as the input
    /// tensor.
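    ///
    /// Illustrative Python usage (a sketch; it assumes the `candle` extension
    /// module has been built and imported):
    ///
    /// ```python
    /// t = candle.tensor([[1., 2.], [3., 4.]])
    /// ids = candle.tensor([1, 0])
    /// t.index_select(ids, 0)  # rows reordered: [[3., 4.], [1., 2.]]
    /// ```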
/// &RETURNS&: Tensor
fn index_select(&self, rhs: &Self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.index_select(rhs, dim).map_err(wrap_err)?))
}
    #[pyo3(text_signature = "(self, index:Tensor, dim:int)")]
    /// Gathers values along an axis specified by dim.
    /// &RETURNS&: Tensor
fn gather(&self, index: &Self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.gather(index, dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor)")]
/// Performs a matrix multiplication between the two tensors.
/// &RETURNS&: Tensor
fn matmul(&self, rhs: &Self) -> PyResult<Self> {
Ok(PyTensor(self.0.matmul(rhs).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor)")]
/// Adds the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
/// &RETURNS&: Tensor
fn broadcast_add(&self, rhs: &Self) -> PyResult<Self> {
Ok(PyTensor(self.0.broadcast_add(rhs).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor)")]
/// Subtracts the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
/// &RETURNS&: Tensor
fn broadcast_sub(&self, rhs: &Self) -> PyResult<Self> {
Ok(PyTensor(self.0.broadcast_sub(rhs).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor)")]
/// Multiplies the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
/// &RETURNS&: Tensor
fn broadcast_mul(&self, rhs: &Self) -> PyResult<Self> {
Ok(PyTensor(self.0.broadcast_mul(rhs).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, rhs:Tensor)")]
/// Divides the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
/// &RETURNS&: Tensor
fn broadcast_div(&self, rhs: &Self) -> PyResult<Self> {
Ok(PyTensor(self.0.broadcast_div(rhs).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, on_true:Tensor, on_false:Tensor)")]
/// Returns a tensor with the same shape as the input tensor, the values are taken from
/// `on_true` if the input tensor value is not zero, and `on_false` at the positions where the
/// input tensor is equal to zero.
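    ///
    /// For example (values illustrative):
    ///
    /// ```python
    /// cond = candle.tensor([1, 0, 1])
    /// cond.where_cond(candle.tensor([1, 2, 3]), candle.tensor([9, 9, 9]))
    /// # -> [1, 9, 3]
    /// ```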
/// &RETURNS&: Tensor
fn where_cond(&self, on_true: &Self, on_false: &Self) -> PyResult<Self> {
Ok(PyTensor(
self.0.where_cond(on_true, on_false).map_err(wrap_err)?,
))
}
/// Index a tensor.
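    ///
    /// A few of the supported indexing forms (shapes shown as comments; a
    /// sketch, not an exhaustive list):
    ///
    /// ```python
    /// t = candle.rand(2, 3, 4)
    /// t[0].shape        # (3, 4)
    /// t[:, 1:3].shape   # (2, 2, 4)
    /// t[..., 0].shape   # (2, 3)
    /// t[None, 0].shape  # (1, 3, 4)
    /// ```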
/// &RETURNS&: Tensor
fn __getitem__(&self, py: Python, idx: PyObject) -> PyResult<Self> {
let mut indexers: Vec<Indexer> = vec![];
let dims = self.0.shape().dims();
fn to_absolute_index(index: isize, current_dim: usize, dims: &[usize]) -> PyResult<usize> {
            // Convert a negative index into an absolute one, e.g. -1 -> dims[current_dim] - 1
let actual_index = if index < 0 {
dims[current_dim] as isize + index
} else {
index
};
// Check that the index is in range
if actual_index < 0 || actual_index >= dims[current_dim] as isize {
return Err(PyValueError::new_err(format!(
"index out of range for dimension '{i}' with indexer '{value}'",
i = current_dim,
value = index
)));
}
Ok(actual_index as usize)
}
fn extract_indexer(
py_indexer: &Bound<PyAny>,
current_dim: usize,
dims: &[usize],
index_argument_count: usize,
) -> PyResult<(Indexer, usize)> {
if let Ok(index) = py_indexer.extract() {
// Handle a single index e.g. tensor[0] or tensor[-1]
Ok((
Indexer::Index(to_absolute_index(index, current_dim, dims)?),
current_dim + 1,
))
} else if let Ok(slice) = py_indexer.downcast::<pyo3::types::PySlice>() {
// Handle a single slice e.g. tensor[0:1] or tensor[0:-1]
let index = slice.indices(dims[current_dim] as isize)?;
Ok((
Indexer::Slice(index.start as usize, index.stop as usize),
current_dim + 1,
))
} else if let Ok(tensor) = py_indexer.extract::<PyTensor>() {
// Handle a tensor as indices e.g. tensor[tensor([0,1])]
let t = tensor.0;
if t.rank() != 1 {
return Err(PyTypeError::new_err(
"multi-dimensional tensor indexing is not supported",
));
}
Ok((Indexer::IndexSelect(t), current_dim + 1))
} else if let Ok(list) = py_indexer.downcast::<pyo3::types::PyList>() {
// Handle a list of indices e.g. tensor[[0,1]]
let mut indexes = vec![];
for item in list.iter() {
let index = item.extract::<i64>()?;
indexes.push(index);
}
Ok((
Indexer::IndexSelect(
Tensor::from_vec(indexes, list.len(), &Device::Cpu).map_err(wrap_err)?,
),
current_dim + 1,
))
} else if py_indexer.is(&py_indexer.py().Ellipsis()) {
// Handle '...' e.g. tensor[..., 0]
if current_dim > 0 {
return Err(PyTypeError::new_err(
"Ellipsis ('...') can only be used at the start of an indexing operation",
));
}
Ok((Indexer::Ellipsis, dims.len() - (index_argument_count - 1)))
} else if py_indexer.is_none() {
// Handle None e.g. tensor[None, 0]
Ok((Indexer::Expand, current_dim))
} else {
Err(PyTypeError::new_err(format!(
"unsupported indexer {}",
py_indexer
)))
}
}
if let Ok(tuple) = idx.downcast_bound::<pyo3::types::PyTuple>(py) {
let not_none_count: usize = tuple.iter().filter(|x| !x.is_none()).count();
if not_none_count > dims.len() {
return Err(PyValueError::new_err("provided too many indices"));
}
let mut current_dim = 0;
for item in tuple.iter() {
let (indexer, new_current_dim) =
extract_indexer(&item, current_dim, dims, not_none_count)?;
current_dim = new_current_dim;
indexers.push(indexer);
}
} else {
let (indexer, _) = extract_indexer(idx.downcast_bound::<PyAny>(py)?, 0, dims, 1)?;
indexers.push(indexer);
}
let mut x = self.0.clone();
let mut current_dim = 0;
// Apply the indexers
for indexer in indexers.iter() {
x = match indexer {
Indexer::Index(n) => x
.narrow(current_dim, *n, 1)
.map_err(wrap_err)?
.squeeze(current_dim)
.map_err(wrap_err)?,
Indexer::Slice(start, stop) => {
let out = x
.narrow(current_dim, *start, stop.saturating_sub(*start))
.map_err(wrap_err)?;
current_dim += 1;
out
}
Indexer::Ellipsis => {
// Ellipsis is a special case, it means that all remaining dimensions should be
// selected => advance the current_dim to the last dimension we have indexers for
current_dim += dims.len() - (indexers.len() - 1);
x
}
Indexer::Expand => {
// Expand is a special case, it means that a new dimension should be added => unsqueeze and advance the current_dim
let out = x.unsqueeze(current_dim).map_err(wrap_err)?;
current_dim += 1;
out
}
Indexer::IndexSelect(indexes) => {
let out = x
.index_select(
&indexes.to_device(x.device()).map_err(wrap_err)?,
current_dim,
)
.map_err(wrap_err)?;
current_dim += 1;
out
}
}
}
Ok(Self(x))
}
/// Add two tensors.
/// &RETURNS&: Tensor
fn __add__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
let tensor = if let Ok(rhs) = rhs.extract::<Self>() {
self.0.broadcast_add(&rhs.0).map_err(wrap_err)?
} else if let Ok(rhs) = rhs.extract::<f64>() {
(&self.0 + rhs).map_err(wrap_err)?
} else {
Err(PyTypeError::new_err("unsupported rhs for add"))?
};
Ok(Self(tensor))
}
fn __radd__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
self.__add__(rhs)
}
/// Multiply two tensors.
/// &RETURNS&: Tensor
fn __mul__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
let tensor = if let Ok(rhs) = rhs.extract::<Self>() {
self.0.broadcast_mul(&rhs.0).map_err(wrap_err)?
} else if let Ok(rhs) = rhs.extract::<f64>() {
(&self.0 * rhs).map_err(wrap_err)?
} else {
Err(PyTypeError::new_err("unsupported rhs for mul"))?
};
Ok(Self(tensor))
}
fn __rmul__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
self.__mul__(rhs)
}
/// Subtract two tensors.
/// &RETURNS&: Tensor
fn __sub__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
let tensor = if let Ok(rhs) = rhs.extract::<Self>() {
self.0.broadcast_sub(&rhs.0).map_err(wrap_err)?
} else if let Ok(rhs) = rhs.extract::<f64>() {
(&self.0 - rhs).map_err(wrap_err)?
} else {
Err(PyTypeError::new_err("unsupported rhs for sub"))?
};
Ok(Self(tensor))
}
/// Divide two tensors.
/// &RETURNS&: Tensor
fn __truediv__(&self, rhs: &Bound<PyAny>) -> PyResult<Self> {
let tensor = if let Ok(rhs) = rhs.extract::<Self>() {
self.0.broadcast_div(&rhs.0).map_err(wrap_err)?
} else if let Ok(rhs) = rhs.extract::<f64>() {
(&self.0 / rhs).map_err(wrap_err)?
} else {
Err(PyTypeError::new_err("unsupported rhs for div"))?
};
Ok(Self(tensor))
}
/// Rich-compare two tensors.
/// &RETURNS&: Tensor
fn __richcmp__(&self, rhs: &Bound<PyAny>, op: CompareOp) -> PyResult<Self> {
let compare = |lhs: &Tensor, rhs: &Tensor| {
let t = match op {
CompareOp::Eq => lhs.eq(rhs),
CompareOp::Ne => lhs.ne(rhs),
CompareOp::Lt => lhs.lt(rhs),
CompareOp::Le => lhs.le(rhs),
CompareOp::Gt => lhs.gt(rhs),
CompareOp::Ge => lhs.ge(rhs),
};
Ok(PyTensor(t.map_err(wrap_err)?))
};
if let Ok(rhs) = rhs.extract::<PyTensor>() {
if self.0.shape() == rhs.0.shape() {
compare(&self.0, &rhs.0)
} else {
// We broadcast manually here because `candle.cmp` does not support automatic broadcasting
let broadcast_shape = self
.0
.shape()
.broadcast_shape_binary_op(rhs.0.shape(), "cmp")
.map_err(wrap_err)?;
let broadcasted_lhs = self.0.broadcast_as(&broadcast_shape).map_err(wrap_err)?;
let broadcasted_rhs = rhs.0.broadcast_as(&broadcast_shape).map_err(wrap_err)?;
compare(&broadcasted_lhs, &broadcasted_rhs)
}
} else if let Ok(rhs) = rhs.extract::<f64>() {
let scalar_tensor = Tensor::new(rhs, self.0.device())
.map_err(wrap_err)?
.to_dtype(self.0.dtype())
.map_err(wrap_err)?
.broadcast_as(self.0.shape())
.map_err(wrap_err)?;
compare(&self.0, &scalar_tensor)
} else {
return Err(PyTypeError::new_err("unsupported rhs for __richcmp__"));
}
}
fn __hash__(&self) -> u64 {
        // we have overridden __richcmp__ => pyo3 wants us to also override __hash__
// we simply hash the address of the tensor
let mut hasher = DefaultHasher::new();
let pointer = &self.0 as *const Tensor;
let address = pointer as usize;
address.hash(&mut hasher);
hasher.finish()
}
#[pyo3(signature=(*shape), text_signature = "(self, *shape:Shape)")]
/// Reshapes the tensor to the given shape.
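    ///
    /// A single `-1` can be used as a hole that is inferred from the element
    /// count, e.g.:
    ///
    /// ```python
    /// t = candle.zeros(2, 6)
    /// t.reshape(3, 4).shape   # (3, 4)
    /// t.reshape(3, -1).shape  # (3, 4), the -1 resolves to 12 / 3 = 4
    /// ```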
/// &RETURNS&: Tensor
fn reshape(&self, shape: PyShapeWithHole) -> PyResult<Self> {
Ok(PyTensor(
self.0
.reshape(shape.to_absolute(&self.0)?)
.map_err(wrap_err)?,
))
}
#[pyo3(signature=(*shape), text_signature = "(self, *shape:Shape)")]
/// Broadcasts the tensor to the given shape.
/// &RETURNS&: Tensor
fn broadcast_as(&self, shape: PyShapeWithHole) -> PyResult<Self> {
Ok(PyTensor(
self.0
.broadcast_as(shape.to_absolute(&self.0)?)
.map_err(wrap_err)?,
))
}
#[pyo3(signature=(*shape), text_signature = "(self, *shape:Shape)")]
/// Broadcasts the tensor to the given shape, adding new dimensions on the left.
/// &RETURNS&: Tensor
fn broadcast_left(&self, shape: PyShapeWithHole) -> PyResult<Self> {
Ok(PyTensor(
self.0
.broadcast_left(shape.to_absolute(&self.0)?)
.map_err(wrap_err)?,
))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Creates a new tensor with the specified dimension removed if its size was one.
/// &RETURNS&: Tensor
fn squeeze(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.squeeze(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Creates a new tensor with a dimension of size one inserted at the specified position.
/// &RETURNS&: Tensor
fn unsqueeze(&self, dim: usize) -> PyResult<Self> {
Ok(PyTensor(self.0.unsqueeze(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, index:int)")]
/// Gets the value at the specified index.
/// &RETURNS&: Tensor
fn get(&self, index: i64) -> PyResult<Self> {
let index = actual_index(self, 0, index).map_err(wrap_err)?;
Ok(PyTensor(self.0.get(index).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim1:int, dim2:int)")]
/// Returns a tensor that is a transposed version of the input, the given dimensions are swapped.
/// &RETURNS&: Tensor
fn transpose(&self, dim1: usize, dim2: usize) -> PyResult<Self> {
Ok(PyTensor(self.0.transpose(dim1, dim2).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int, start:int, len:int)")]
/// Returns a new tensor that is a narrowed version of the input, the dimension `dim`
/// ranges from `start` to `start + len`.
/// &RETURNS&: Tensor
fn narrow(&self, dim: i64, start: i64, len: usize) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
let start = actual_index(self, dim, start).map_err(wrap_err)?;
Ok(PyTensor(self.0.narrow(dim, start, len).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Returns the indices of the maximum value(s) across the selected dimension.
/// &RETURNS&: Tensor
fn argmax_keepdim(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.argmax_keepdim(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Returns the indices of the minimum value(s) across the selected dimension.
/// &RETURNS&: Tensor
fn argmin_keepdim(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.argmin_keepdim(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Gathers the maximum value across the selected dimension.
/// &RETURNS&: Tensor
fn max_keepdim(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.max_keepdim(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Gathers the minimum value across the selected dimension.
/// &RETURNS&: Tensor
fn min_keepdim(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.min_keepdim(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:Union[int, List[int]])")]
/// Returns the sum of all elements in the input tensor. The sum is performed over all the input dimensions.
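    ///
    /// The summed dimensions are kept with size one, e.g.:
    ///
    /// ```python
    /// t = candle.ones(2, 3)
    /// t.sum_keepdim(1).shape       # (2, 1)
    /// t.sum_keepdim([0, 1]).shape  # (1, 1)
    /// ```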
/// &RETURNS&: Tensor
fn sum_keepdim(&self, dims: PyObject, py: Python<'_>) -> PyResult<Self> {
let dims = if let Ok(dim) = dims.extract::<usize>(py) {
vec![dim]
} else {
dims.extract::<Vec<usize>>(py)?
};
Ok(PyTensor(
self.0.sum_keepdim(dims.as_slice()).map_err(wrap_err)?,
))
}
/// Returns the sum of the tensor.
/// &RETURNS&: Tensor
fn sum_all(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.sum_all().map_err(wrap_err)?))
}
/// Returns the mean of the tensor.
/// &RETURNS&: Tensor
fn mean_all(&self) -> PyResult<Self> {
let elements = self.0.elem_count();
let sum = self.0.sum_all().map_err(wrap_err)?;
let mean = (sum / elements as f64).map_err(wrap_err)?;
Ok(PyTensor(mean))
}
#[pyo3(text_signature = "(self, dim:int)")]
/// Flattens the tensor on the dimension indexes from `dim` (inclusive) to the last dimension.
/// &RETURNS&: Tensor
fn flatten_from(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.flatten_from(dim).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, dim:int)")]
    /// Flattens the tensor on the dimension indexes from `0` to `dim` (inclusive).
/// &RETURNS&: Tensor
fn flatten_to(&self, dim: i64) -> PyResult<Self> {
let dim = actual_dim(self, dim).map_err(wrap_err)?;
Ok(PyTensor(self.0.flatten_to(dim).map_err(wrap_err)?))
}
/// Flattens the tensor into a 1D tensor.
/// &RETURNS&: Tensor
fn flatten_all(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.flatten_all().map_err(wrap_err)?))
}
/// Transposes the tensor.
/// &RETURNS&: Tensor
fn t(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.t().map_err(wrap_err)?))
}
/// Makes the tensor contiguous in memory.
/// &RETURNS&: Tensor
fn contiguous(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.contiguous().map_err(wrap_err)?))
}
/// Returns true if the tensor is contiguous in C order.
/// &RETURNS&: bool
fn is_contiguous(&self) -> bool {
self.0.is_contiguous()
}
/// Returns true if the tensor is contiguous in Fortran order.
/// &RETURNS&: bool
fn is_fortran_contiguous(&self) -> bool {
self.0.is_fortran_contiguous()
}
/// Detach the tensor from the computation graph.
/// &RETURNS&: Tensor
fn detach(&self) -> Self {
PyTensor(self.0.detach())
}
/// Returns a copy of the tensor.
/// &RETURNS&: Tensor
fn copy(&self) -> PyResult<Self> {
Ok(PyTensor(self.0.copy().map_err(wrap_err)?))
}
#[pyo3(signature = (*args, **kwargs), text_signature = "(self, *args, **kwargs)")]
/// Performs Tensor dtype and/or device conversion.
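    ///
    /// Accepts a dtype, a device, or another tensor to copy both from, passed
    /// positionally or as keywords (a sketch; the string `"cpu"` is also
    /// accepted where a device is expected):
    ///
    /// ```python
    /// t = candle.ones(2, 2)
    /// t.to(candle.f16)
    /// t.to(device="cpu", dtype=candle.f32)
    /// ```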
/// &RETURNS&: Tensor
fn to(&self, args: &Bound<PyTuple>, kwargs: Option<&Bound<PyDict>>) -> PyResult<Self> {
let mut device: Option<PyDevice> = None;
let mut dtype: Option<PyDType> = None;
let mut other: Option<PyTensor> = None;
fn handle_duplicates<T>(
opt: &mut Option<T>,
extraction_result: PyResult<T>,
err_msg: &'static str,
) -> PyResult<()> {
if let Ok(successful_extraction) = extraction_result {
if opt.is_some() {
return Err(PyValueError::new_err(err_msg));
}
*opt = Some(successful_extraction);
}
Ok(())
}
        // handle args
for arg in args.iter() {
if arg.extract::<PyDevice>().is_ok() {
handle_duplicates(
&mut device,
arg.extract::<PyDevice>(),
"cannot specify multiple devices",
)?;
} else if arg.extract::<PyDType>().is_ok() {
handle_duplicates(
&mut dtype,
arg.extract::<PyDType>(),
"cannot specify multiple dtypes",
)?;
} else if arg.extract::<PyTensor>().is_ok() {
handle_duplicates(
&mut other,
arg.extract::<PyTensor>(),
"cannot specify multiple output tensors",
)?;
} else {
return Err(PyTypeError::new_err(format!(
"unsupported argument type `{:#?}`",
arg.get_type().name()
)));
}
}
if let Some(kwargs) = kwargs {
if let Ok(Some(any)) = kwargs.get_item("dtype") {
handle_duplicates(
&mut dtype,
any.extract::<PyDType>(),
"cannot specify multiple dtypes",
)?;
}
if let Ok(Some(any)) = kwargs.get_item("device") {
handle_duplicates(
&mut device,
any.extract::<PyDevice>(),
"cannot specify multiple devices",
)?;
}
if let Ok(Some(any)) = kwargs.get_item("other") {
handle_duplicates(
&mut other,
any.extract::<PyTensor>(),
"cannot specify multiple output tensors",
)?;
}
}
if let Some(other) = other {
if device.is_some() {
return Err(PyValueError::new_err(
"cannot specify both an output tensor and a device",
));
}
if dtype.is_some() {
return Err(PyValueError::new_err(
"cannot specify both an output tensor and a dtype",
));
}
dtype = Some(other.dtype());
device = Some(PyDevice::from_device(other.0.device()));
}
let result = match (device, dtype) {
(Some(device), Some(dtype)) => self
.0
.to_device(&device.as_device()?)
.map_err(wrap_err)?
.to_dtype(dtype.0)
.map_err(wrap_err)?,
(Some(device), None) => self.0.to_device(&device.as_device()?).map_err(wrap_err)?,
(None, Some(dtype)) => self.0.to_dtype(dtype.0).map_err(wrap_err)?,
(None, None) => return Err(PyTypeError::new_err("No valid dtype or device specified")),
};
Ok(PyTensor(result))
}
#[pyo3(text_signature = "(self, dtype:Union[str,DType])")]
/// Convert the tensor to a new dtype.
/// &RETURNS&: Tensor
fn to_dtype(&self, dtype: PyObject, py: Python<'_>) -> PyResult<Self> {
let dtype = PyDType::from_pyobject(dtype, py)?;
Ok(PyTensor(self.0.to_dtype(dtype.0).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, device:Union[str,Device])")]
/// Move the tensor to a new device.
/// &RETURNS&: Tensor
fn to_device(&self, device: PyDevice) -> PyResult<Self> {
let device = device.as_device()?;
Ok(PyTensor(self.0.to_device(&device).map_err(wrap_err)?))
}
#[pyo3(text_signature = "(self, quantized_dtype:str)")]
/// Quantize the tensor.
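    ///
    /// The dtype is given as a ggml-style name; the last dimension must be a
    /// multiple of the block size of the chosen format, e.g.:
    ///
    /// ```python
    /// t = candle.randn(256, 256)
    /// q = t.quantize("q4_0")
    /// q.ggml_dtype  # "Q4_0"
    /// ```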
/// &RETURNS&: QTensor
fn quantize(&self, quantized_dtype: &str) -> PyResult<PyQTensor> {
use ::candle::quantized;
let res = match quantized_dtype.to_lowercase().as_str() {
"q2k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q2K),
"q3k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q3K),
"q4_0" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q4_0),
"q4_1" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q4_1),
"q4k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q4K),
"q5_0" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q5_0),
"q5_1" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q5_1),
"q5k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q5K),
"q6k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q6K),
"q8_0" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q8_0),
"q8_1" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q8_1),
"q8k" => quantized::QTensor::quantize(self, quantized::GgmlDType::Q8K),
"f16" => quantized::QTensor::quantize(self, quantized::GgmlDType::F16),
"f32" => quantized::QTensor::quantize(self, quantized::GgmlDType::F32),
dt => {
return Err(PyErr::new::<PyValueError, _>(format!(
"unknown quantized-dtype {dt}"
)))
}
};
Ok(PyQTensor(Arc::new(res.map_err(wrap_err)?)))
}
}
#[pyfunction]
#[pyo3(text_signature = "(tensors:List[Tensor], dim:int)")]
/// Concatenate the tensors across one axis.
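///
/// For example:
///
/// ```python
/// a = candle.ones(2, 3)
/// b = candle.zeros(2, 3)
/// candle.cat([a, b], 0).shape  # (4, 3)
/// ```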
/// &RETURNS&: Tensor
fn cat(tensors: Vec<PyTensor>, dim: i64) -> PyResult<PyTensor> {
if tensors.is_empty() {
return Err(PyErr::new::<PyValueError, _>("empty input to cat"));
}
let dim = actual_dim(&tensors[0], dim).map_err(wrap_err)?;
let tensors = tensors.into_iter().map(|t| t.0).collect::<Vec<_>>();
let tensor = Tensor::cat(&tensors, dim).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(text_signature = "(tensors:List[Tensor], dim:int)")]
/// Stack the tensors along a new axis.
/// &RETURNS&: Tensor
fn stack(tensors: Vec<PyTensor>, dim: usize) -> PyResult<PyTensor> {
let tensors = tensors.into_iter().map(|t| t.0).collect::<Vec<_>>();
let tensor = Tensor::stack(&tensors, dim).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(text_signature = "(data:_ArrayLike)")]
/// Creates a new tensor from a Python value. The value can be a scalar or array-like object.
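///
/// The dtype follows from the Python value, e.g.:
///
/// ```python
/// candle.tensor(3.14)                  # scalar, f32
/// candle.tensor([1, 2, 3])             # 1D, u32
/// candle.tensor([[1., 2.], [3., 4.]])  # 2D, f32
/// ```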
/// &RETURNS&: Tensor
fn tensor(py: Python<'_>, data: PyObject) -> PyResult<PyTensor> {
PyTensor::new(py, data)
}
#[pyfunction]
#[pyo3(signature = (*shape,device=None), text_signature = "(*shape:Shape, device:Optional[Device]=None)")]
/// Creates a new tensor with random values.
/// &RETURNS&: Tensor
fn rand(_py: Python<'_>, shape: PyShape, device: Option<PyDevice>) -> PyResult<PyTensor> {
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
let tensor = Tensor::rand(0f32, 1f32, shape, &device).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(signature = (*shape,device=None), text_signature = "(*shape:Shape, device:Optional[Device]=None)")]
/// Creates a new tensor with random values from a normal distribution.
/// &RETURNS&: Tensor
fn randn(_py: Python<'_>, shape: PyShape, device: Option<PyDevice>) -> PyResult<PyTensor> {
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
let tensor = Tensor::randn(0f32, 1f32, shape, &device).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(signature = (*shape, dtype=None, device=None),text_signature = "(*shape:Shape, dtype:Optional[DType]=None, device:Optional[Device]=None)")]
/// Creates a new tensor filled with ones.
/// &RETURNS&: Tensor
fn ones(
py: Python<'_>,
shape: PyShape,
dtype: Option<PyObject>,
device: Option<PyDevice>,
) -> PyResult<PyTensor> {
let dtype = match dtype {
None => DType::F32,
Some(dtype) => PyDType::from_pyobject(dtype, py)?.0,
};
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
let tensor = Tensor::ones(shape, dtype, &device).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(signature = (*shape, dtype=None, device=None), text_signature = "(*shape:Shape, dtype:Optional[DType]=None, device:Optional[Device]=None)")]
/// Creates a new tensor filled with zeros.
/// &RETURNS&: Tensor
fn zeros(
py: Python<'_>,
shape: PyShape,
dtype: Option<PyObject>,
device: Option<PyDevice>,
) -> PyResult<PyTensor> {
let dtype = match dtype {
None => DType::F32,
Some(dtype) => PyDType::from_pyobject(dtype, py)?.0,
};
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
let tensor = Tensor::zeros(shape, dtype, &device).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[derive(Debug, Clone)]
#[pyclass(name = "QTensor")]
/// A quantized tensor.
struct PyQTensor(Arc<QTensor>);
impl std::ops::Deref for PyQTensor {
type Target = QTensor;
fn deref(&self) -> &Self::Target {
self.0.as_ref()
}
}
#[pymethods]
impl PyQTensor {
#[getter]
    /// Gets the tensor's quantized dtype.
/// &RETURNS&: str
fn ggml_dtype(&self) -> String {
format!("{:?}", self.0.dtype())
}
#[getter]
    /// Gets the rank of the tensor.
/// &RETURNS&: int
fn rank(&self) -> usize {
self.0.rank()
}
#[getter]
    /// Gets the shape of the tensor.
/// &RETURNS&: Tuple[int]
fn shape(&self, py: Python<'_>) -> PyObject {
PyTuple::new_bound(py, self.0.shape().dims()).to_object(py)
}
fn __repr__(&self) -> String {
format!("{:?}", self.0)
}
fn __str__(&self) -> String {
self.__repr__()
}
/// Dequantizes the tensor.
/// &RETURNS&: Tensor
fn dequantize(&self) -> PyResult<PyTensor> {
let tensor = self.0.dequantize(&Device::Cpu).map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyo3(text_signature = "(self, lhs:Tensor)")]
/// Performs a quantized matrix multiplication, with the quantized tensor as the right hand side.
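    ///
    /// A minimal sketch (the weight shape is `(out, in)` and its last
    /// dimension must be a multiple of the quantization block size, 32 here):
    ///
    /// ```python
    /// w = candle.randn(4, 32).quantize("q8_0")
    /// x = candle.randn(2, 32)
    /// w.matmul_t(x)  # x @ w^T, shape (2, 4)
    /// ```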
/// &RETURNS&: Tensor
fn matmul_t(&self, lhs: &PyTensor) -> PyResult<PyTensor> {
let qmatmul = ::candle::quantized::QMatMul::from_arc(self.0.clone()).map_err(wrap_err)?;
let res = qmatmul.forward(lhs).map_err(wrap_err)?;
Ok(PyTensor(res))
}
}
#[pyfunction]
#[pyo3(text_signature = "(path:Union[str,PathLike])")]
/// Loads a safetensors file. Returns a dictionary mapping tensor names to tensors.
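///
/// For example (the path and the tensor name are placeholders):
///
/// ```python
/// tensors = candle.utils.load_safetensors("weights.safetensors")
/// tensors["w"].shape
/// ```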
/// &RETURNS&: Dict[str,Tensor]
fn load_safetensors(path: &str, py: Python<'_>) -> PyResult<PyObject> {
let res = ::candle::safetensors::load(path, &Device::Cpu).map_err(wrap_err)?;
let res = res
.into_iter()
.map(|(key, value)| (key, PyTensor(value).into_py(py)))
.collect::<Vec<_>>();
Ok(res.into_py_dict_bound(py).to_object(py))
}
#[pyfunction]
#[pyo3(text_signature = "(path:Union[str,PathLike], tensors:Dict[str,Tensor])")]
/// Saves a dictionary of tensors to a safetensors file.
/// &RETURNS&: None
fn save_safetensors(
path: &str,
tensors: std::collections::HashMap<String, PyTensor>,
) -> PyResult<()> {
let tensors = tensors
.into_iter()
.map(|(s, t)| (s, t.0))
.collect::<std::collections::HashMap<_, _>>();
::candle::safetensors::save(&tensors, path).map_err(wrap_err)
}
#[pyfunction]
#[pyo3(signature = (path, device = None))]
/// Load a GGML file. Returns a tuple of three objects: a dictionary mapping tensor names to tensors,
/// a dictionary mapping hyperparameter names to hyperparameter values, and a vocabulary.
/// &RETURNS&: Tuple[Dict[str,QTensor], Dict[str,Any], List[str]]
fn load_ggml(
path: &str,
device: Option<PyDevice>,
py: Python<'_>,
) -> PyResult<(PyObject, PyObject, PyObject)> {
let mut file = std::fs::File::open(path)?;
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
let ggml =
::candle::quantized::ggml_file::Content::read(&mut file, &device).map_err(wrap_err)?;
let tensors = ggml
.tensors
.into_iter()
.map(|(key, qtensor)| Ok((key, PyQTensor(Arc::new(qtensor)).into_py(py))))
.collect::<::candle::Result<Vec<_>>>()
.map_err(wrap_err)?;
let tensors = tensors.into_py_dict_bound(py).to_object(py);
let hparams = [
("n_vocab", ggml.hparams.n_vocab),
("n_embd", ggml.hparams.n_embd),
("n_mult", ggml.hparams.n_mult),
("n_head", ggml.hparams.n_head),
("n_layer", ggml.hparams.n_layer),
("n_rot", ggml.hparams.n_rot),
("ftype", ggml.hparams.ftype),
];
let hparams = hparams.into_py_dict_bound(py).to_object(py);
let vocab = ggml
.vocab
.token_score_pairs
.iter()
.map(|(bytes, _)| String::from_utf8_lossy(bytes.as_slice()).to_string())
.collect::<Vec<String>>()
.to_object(py);
Ok((tensors, hparams, vocab))
}
#[pyfunction]
#[pyo3(signature = (path, device = None))]
/// Loads a GGUF file. Returns a tuple of two dictionaries: the first maps tensor names to tensors,
/// and the second maps metadata keys to metadata values.
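///
/// For example (the path is a placeholder; the metadata keys depend on the
/// file):
///
/// ```python
/// tensors, metadata = candle.utils.load_gguf("model.gguf")
/// list(metadata.keys())
/// ```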
/// &RETURNS&: Tuple[Dict[str,QTensor], Dict[str,Any]]
fn load_gguf(
path: &str,
device: Option<PyDevice>,
py: Python<'_>,
) -> PyResult<(PyObject, PyObject)> {
let device = device.unwrap_or(PyDevice::Cpu).as_device()?;
use ::candle::quantized::gguf_file;
fn gguf_value_to_pyobject(v: &gguf_file::Value, py: Python<'_>) -> PyResult<PyObject> {
let v: PyObject = match v {
gguf_file::Value::U8(x) => x.into_py(py),
gguf_file::Value::I8(x) => x.into_py(py),
gguf_file::Value::U16(x) => x.into_py(py),
gguf_file::Value::I16(x) => x.into_py(py),
gguf_file::Value::U32(x) => x.into_py(py),
gguf_file::Value::I32(x) => x.into_py(py),
gguf_file::Value::U64(x) => x.into_py(py),
gguf_file::Value::I64(x) => x.into_py(py),
gguf_file::Value::F32(x) => x.into_py(py),
gguf_file::Value::F64(x) => x.into_py(py),
gguf_file::Value::Bool(x) => x.into_py(py),
gguf_file::Value::String(x) => x.into_py(py),
gguf_file::Value::Array(x) => {
let list = pyo3::types::PyList::empty_bound(py);
for elem in x.iter() {
list.append(gguf_value_to_pyobject(elem, py)?)?;
}
list.into()
}
};
Ok(v)
}
let mut file = std::fs::File::open(path)?;
let gguf = gguf_file::Content::read(&mut file).map_err(wrap_err)?;
let tensors = gguf
.tensor_infos
.keys()
.map(|key| {
let qtensor = gguf.tensor(&mut file, key, &device)?;
Ok((key, PyQTensor(Arc::new(qtensor)).into_py(py)))
})
.collect::<::candle::Result<Vec<_>>>()
.map_err(wrap_err)?;
let tensors = tensors.into_py_dict_bound(py).to_object(py);
let metadata = gguf
.metadata
.iter()
.map(|(key, value)| Ok((key, gguf_value_to_pyobject(value, py)?)))
.collect::<PyResult<Vec<_>>>()?
.into_py_dict_bound(py)
.to_object(py);
Ok((tensors, metadata))
}
#[pyfunction]
#[pyo3(
signature = (path, tensors, metadata)
)]
/// Save quantized tensors and metadata to a GGUF file.
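///
/// A sketch of a round-trip (the paths are placeholders; `tensors` maps names
/// to `QTensor`s and `metadata` maps keys to plain Python values):
///
/// ```python
/// tensors, metadata = candle.utils.load_gguf("model.gguf")
/// candle.utils.save_gguf("model-copy.gguf", tensors, metadata)
/// ```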
fn save_gguf(path: &str, tensors: PyObject, metadata: PyObject, py: Python<'_>) -> PyResult<()> {
use ::candle::quantized::gguf_file;
fn pyobject_to_gguf_value(v: &Bound<PyAny>, py: Python<'_>) -> PyResult<gguf_file::Value> {
let v: gguf_file::Value = if let Ok(x) = v.extract::<u8>() {
gguf_file::Value::U8(x)
} else if let Ok(x) = v.extract::<i8>() {
gguf_file::Value::I8(x)
} else if let Ok(x) = v.extract::<u16>() {
gguf_file::Value::U16(x)
} else if let Ok(x) = v.extract::<i16>() {
gguf_file::Value::I16(x)
} else if let Ok(x) = v.extract::<u32>() {
gguf_file::Value::U32(x)
} else if let Ok(x) = v.extract::<i32>() {
gguf_file::Value::I32(x)
} else if let Ok(x) = v.extract::<u64>() {
gguf_file::Value::U64(x)
} else if let Ok(x) = v.extract::<i64>() {
gguf_file::Value::I64(x)
} else if let Ok(x) = v.extract::<f32>() {
gguf_file::Value::F32(x)
} else if let Ok(x) = v.extract::<f64>() {
gguf_file::Value::F64(x)
} else if let Ok(x) = v.extract::<bool>() {
gguf_file::Value::Bool(x)
} else if let Ok(x) = v.extract::<String>() {
gguf_file::Value::String(x)
} else if let Ok(x) = v.extract::<Vec<PyObject>>() {
let x = x
.into_iter()
.map(|f| pyobject_to_gguf_value(f.bind(py), py))
.collect::<PyResult<Vec<_>>>()?;
gguf_file::Value::Array(x)
} else {
return Err(PyErr::new::<PyValueError, _>(format!(
"unsupported type {:?}",
v
)));
};
Ok(v)
}
let tensors = tensors
.downcast_bound::<PyDict>(py)
.map_err(|_| PyErr::new::<PyValueError, _>("expected a dict"))?
.iter()
.map(|(key, value)| {
Ok((
key.extract::<String>()
.map_err(|_| PyErr::new::<PyValueError, _>("keys must be strings"))?,
value.extract::<PyQTensor>()?.0,
))
})
.collect::<PyResult<Vec<_>>>()?;
let metadata = metadata
.downcast_bound::<PyDict>(py)
.map_err(|_| PyErr::new::<PyValueError, _>("expected a dict"))?
.iter()
.map(|(key, value)| {
Ok((
key.extract::<String>()
.map_err(|_| PyErr::new::<PyValueError, _>("keys must be strings"))?,
pyobject_to_gguf_value(&value.as_borrowed(), py)?,
))
})
.collect::<PyResult<Vec<_>>>()?;
let converted_metadata: Vec<_> = metadata
.iter()
.map(|(name, value)| (name.as_str(), value))
.collect();
let converted_tensors: Vec<_> = tensors
.iter()
.map(|(name, tensor)| (name.as_str(), tensor.as_ref()))
.collect();
let mut file = std::fs::File::create(path)?;
gguf_file::write(&mut file, &converted_metadata, &converted_tensors).map_err(wrap_err)
}
#[pyfunction]
/// Returns true if the 'cuda' backend is available.
/// &RETURNS&: bool
fn cuda_is_available() -> bool {
::candle::utils::cuda_is_available()
}
#[pyfunction]
/// Returns true if candle was compiled with 'accelerate' support.
/// &RETURNS&: bool
fn has_accelerate() -> bool {
::candle::utils::has_accelerate()
}
#[pyfunction]
/// Returns true if candle was compiled with MKL support.
/// &RETURNS&: bool
fn has_mkl() -> bool {
::candle::utils::has_mkl()
}
#[pyfunction]
/// Returns the number of threads used by candle.
/// &RETURNS&: int
fn get_num_threads() -> usize {
::candle::utils::get_num_threads()
}
fn candle_utils(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(cuda_is_available, m)?)?;
m.add_function(wrap_pyfunction!(get_num_threads, m)?)?;
m.add_function(wrap_pyfunction!(has_accelerate, m)?)?;
m.add_function(wrap_pyfunction!(has_mkl, m)?)?;
m.add_function(wrap_pyfunction!(load_ggml, m)?)?;
m.add_function(wrap_pyfunction!(load_gguf, m)?)?;
m.add_function(wrap_pyfunction!(save_gguf, m)?)?;
m.add_function(wrap_pyfunction!(load_safetensors, m)?)?;
m.add_function(wrap_pyfunction!(save_safetensors, m)?)?;
Ok(())
}
#[pyfunction]
#[pyo3(text_signature = "(tensor:Tensor, dim:int)")]
/// Applies the Softmax function to a given tensor.
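///
/// For example:
///
/// ```python
/// t = candle.tensor([[1., 2., 3.]])
/// candle.functional.softmax(t, -1)  # each row sums to 1
/// ```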
/// &RETURNS&: Tensor
fn softmax(tensor: PyTensor, dim: i64) -> PyResult<PyTensor> {
let dim = actual_dim(&tensor, dim).map_err(wrap_err)?;
let sm = candle_nn::ops::softmax(&tensor.0, dim).map_err(wrap_err)?;
Ok(PyTensor(sm))
}
#[pyfunction]
#[pyo3(signature = (tensor, ksize, *, stride=1), text_signature = "(tensor:Tensor, ksize:int, stride:int=1)")]
/// Applies the 2d avg-pool function to a given tensor.
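///
/// The stride defaults to 1 and is keyword-only, e.g.:
///
/// ```python
/// img = candle.rand(1, 1, 4, 4)
/// candle.functional.avg_pool2d(img, 2, stride=2).shape  # (1, 1, 2, 2)
/// ```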
/// &RETURNS&: Tensor
fn avg_pool2d(tensor: PyTensor, ksize: usize, stride: usize) -> PyResult<PyTensor> {
let tensor = tensor
.avg_pool2d_with_stride(ksize, stride)
.map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(signature = (tensor, ksize, *, stride=1), text_signature = "(tensor:Tensor, ksize:int, stride:int=1)")]
/// Applies the 2d max-pool function to a given tensor.
/// &RETURNS&: Tensor
fn max_pool2d(tensor: PyTensor, ksize: usize, stride: usize) -> PyResult<PyTensor> {
let tensor = tensor
.max_pool2d_with_stride(ksize, stride)
.map_err(wrap_err)?;
Ok(PyTensor(tensor))
}
#[pyfunction]
#[pyo3(text_signature = "(tensor:Tensor)")]
/// Applies the Sigmoid Linear Unit (SiLU) function to a given tensor.
/// &RETURNS&: Tensor
fn silu(tensor: PyTensor) -> PyResult<PyTensor> {
let s = candle_nn::ops::silu(&tensor.0).map_err(wrap_err)?;
Ok(PyTensor(s))
}
#[pyfunction]
#[pyo3(text_signature = "(tensor:Tensor)")]
/// Applies the Gaussian Error Linear Unit (GELU) function to a given tensor.
/// &RETURNS&: Tensor
fn gelu(tensor: PyTensor) -> PyResult<PyTensor> {
let s = tensor.0.gelu_erf().map_err(wrap_err)?;
Ok(PyTensor(s))
}
#[pyfunction]
#[pyo3(text_signature = "(tensor:Tensor)")]
/// Applies the Rectified Linear Unit (ReLU) function to a given tensor.
/// &RETURNS&: Tensor
fn relu(tensor: PyTensor) -> PyResult<PyTensor> {
let s = tensor.0.relu().map_err(wrap_err)?;
Ok(PyTensor(s))
}
#[pyfunction]
#[pyo3(text_signature = "(tensor:Tensor)")]
/// Applies the tanh function to a given tensor.
/// &RETURNS&: Tensor
fn tanh(tensor: PyTensor) -> PyResult<PyTensor> {
let s = tensor.0.tanh().map_err(wrap_err)?;
Ok(PyTensor(s))
}
fn candle_functional_m(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(silu, m)?)?;
m.add_function(wrap_pyfunction!(softmax, m)?)?;
m.add_function(wrap_pyfunction!(max_pool2d, m)?)?;
m.add_function(wrap_pyfunction!(avg_pool2d, m)?)?;
m.add_function(wrap_pyfunction!(gelu, m)?)?;
m.add_function(wrap_pyfunction!(relu, m)?)?;
m.add_function(wrap_pyfunction!(tanh, m)?)?;
Ok(())
}
#[cfg(feature = "onnx")]
fn candle_onnx_m(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
use onnx::{PyONNXModel, PyONNXTensorDescriptor};
m.add_class::<PyONNXModel>()?;
m.add_class::<PyONNXTensorDescriptor>()?;
Ok(())
}
#[pymodule]
fn candle(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let utils = PyModule::new_bound(py, "utils")?;
candle_utils(py, &utils)?;
m.add_submodule(&utils)?;
let nn = PyModule::new_bound(py, "functional")?;
candle_functional_m(py, &nn)?;
m.add_submodule(&nn)?;
#[cfg(feature = "onnx")]
{
let onnx = PyModule::new_bound(py, "onnx")?;
candle_onnx_m(py, &onnx)?;
m.add_submodule(&onnx)?;
}
m.add_class::<PyTensor>()?;
m.add_class::<PyQTensor>()?;
m.add_class::<PyDType>()?;
m.add("u8", PyDType(DType::U8))?;
m.add("u32", PyDType(DType::U32))?;
m.add("i64", PyDType(DType::I64))?;
m.add("bf16", PyDType(DType::BF16))?;
m.add("f16", PyDType(DType::F16))?;
m.add("f32", PyDType(DType::F32))?;
m.add("f64", PyDType(DType::F64))?;
m.add_function(wrap_pyfunction!(cat, m)?)?;
m.add_function(wrap_pyfunction!(ones, m)?)?;
m.add_function(wrap_pyfunction!(rand, m)?)?;
m.add_function(wrap_pyfunction!(randn, m)?)?;
m.add_function(wrap_pyfunction!(tensor, m)?)?;
m.add_function(wrap_pyfunction!(stack, m)?)?;
m.add_function(wrap_pyfunction!(zeros, m)?)?;
Ok(())
}
| 5 |
0 | hf_public_repos/candle/candle-pyo3 | hf_public_repos/candle/candle-pyo3/src/onnx.rs | use std::collections::HashMap;
use crate::utils::wrap_err;
use crate::{PyDType, PyTensor};
use candle_onnx::eval::{dtype, get_tensor, simple_eval};
use candle_onnx::onnx::tensor_proto::DataType;
use candle_onnx::onnx::tensor_shape_proto::dimension::Value;
use candle_onnx::onnx::type_proto::{Tensor as ONNXTensor, Value as ONNXValue};
use candle_onnx::onnx::{ModelProto, ValueInfoProto};
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use pyo3::types::{PyList, PyTuple};
#[derive(Clone, Debug)]
#[pyclass(name = "ONNXTensorDescription")]
/// A wrapper around an ONNX tensor description.
pub struct PyONNXTensorDescriptor(ONNXTensor);
#[pymethods]
impl PyONNXTensorDescriptor {
#[getter]
/// The data type of the tensor.
/// &RETURNS&: DType
fn dtype(&self) -> PyResult<PyDType> {
match DataType::try_from(self.0.elem_type) {
Ok(dt) => match dtype(dt) {
Some(dt) => Ok(PyDType(dt)),
None => Err(PyValueError::new_err(format!(
"unsupported 'value' data-type {dt:?}"
))),
},
type_ => Err(PyValueError::new_err(format!(
"unsupported input type {type_:?}"
))),
}
}
#[getter]
/// The shape of the tensor.
/// &RETURNS&: Tuple[Union[int,str,Any]]
fn shape(&self, py: Python) -> PyResult<Py<PyTuple>> {
let shape = PyList::empty_bound(py);
if let Some(d) = &self.0.shape {
for dim in d.dim.iter() {
if let Some(value) = &dim.value {
match value {
Value::DimValue(v) => shape.append(*v)?,
Value::DimParam(s) => shape.append(s.clone())?,
};
} else {
return Err(PyValueError::new_err("None value in shape"));
}
}
}
Ok(shape.to_tuple().into())
}
fn __repr__(&self, py: Python) -> String {
match (self.shape(py), self.dtype()) {
(Ok(shape), Ok(dtype)) => format!(
"TensorDescriptor[shape: {:?}, dtype: {:?}]",
shape.to_string(),
dtype.__str__()
),
(Err(_), Err(_)) => "TensorDescriptor[shape: unknown, dtype: unknown]".to_string(),
(Err(_), Ok(dtype)) => format!(
"TensorDescriptor[shape: unknown, dtype: {:?}]",
dtype.__str__()
),
(Ok(shape), Err(_)) => format!(
"TensorDescriptor[shape: {:?}, dtype: unknown]",
shape.to_string()
),
}
}
fn __str__(&self, py: Python) -> String {
self.__repr__(py)
}
}
#[derive(Clone, Debug)]
#[pyclass(name = "ONNXModel")]
/// A wrapper around an ONNX model.
pub struct PyONNXModel(ModelProto);
fn extract_tensor_descriptions(
value_infos: &[ValueInfoProto],
) -> HashMap<String, PyONNXTensorDescriptor> {
let mut map = HashMap::new();
for value_info in value_infos.iter() {
let input_type = match &value_info.r#type {
Some(input_type) => input_type,
None => continue,
};
let input_type = match &input_type.value {
Some(input_type) => input_type,
None => continue,
};
let tensor_type: &ONNXTensor = match input_type {
ONNXValue::TensorType(tt) => tt,
_ => continue,
};
map.insert(
value_info.name.to_string(),
PyONNXTensorDescriptor(tensor_type.clone()),
);
}
map
}
#[pymethods]
impl PyONNXModel {
#[new]
#[pyo3(text_signature = "(self, path:str)")]
/// Load an ONNX model from the given path.
fn new(path: String) -> PyResult<Self> {
let model: ModelProto = candle_onnx::read_file(path).map_err(wrap_err)?;
Ok(PyONNXModel(model))
}
#[getter]
/// The version of the IR this model targets.
/// &RETURNS&: int
fn ir_version(&self) -> i64 {
self.0.ir_version
}
#[getter]
/// The producer of the model.
/// &RETURNS&: str
fn producer_name(&self) -> String {
self.0.producer_name.clone()
}
#[getter]
/// The version of the producer of the model.
/// &RETURNS&: str
fn producer_version(&self) -> String {
self.0.producer_version.clone()
}
#[getter]
/// The domain of the operator set of the model.
/// &RETURNS&: str
fn domain(&self) -> String {
self.0.domain.clone()
}
#[getter]
/// The version of the model.
/// &RETURNS&: int
fn model_version(&self) -> i64 {
self.0.model_version
}
#[getter]
/// The doc string of the model.
/// &RETURNS&: str
fn doc_string(&self) -> String {
self.0.doc_string.clone()
}
/// Get the weights of the model.
/// &RETURNS&: Dict[str, Tensor]
fn initializers(&self) -> PyResult<HashMap<String, PyTensor>> {
let mut map = HashMap::new();
if let Some(graph) = self.0.graph.as_ref() {
for tensor_description in graph.initializer.iter() {
let tensor = get_tensor(tensor_description, tensor_description.name.as_str())
.map_err(wrap_err)?;
map.insert(tensor_description.name.to_string(), PyTensor(tensor));
}
}
Ok(map)
}
#[getter]
/// The inputs of the model.
/// &RETURNS&: Optional[Dict[str, ONNXTensorDescription]]
fn inputs(&self) -> Option<HashMap<String, PyONNXTensorDescriptor>> {
if let Some(graph) = self.0.graph.as_ref() {
return Some(extract_tensor_descriptions(&graph.input));
}
None
}
#[getter]
/// The outputs of the model.
/// &RETURNS&: Optional[Dict[str, ONNXTensorDescription]]
fn outputs(&self) -> Option<HashMap<String, PyONNXTensorDescriptor>> {
if let Some(graph) = self.0.graph.as_ref() {
return Some(extract_tensor_descriptions(&graph.output));
}
None
}
#[pyo3(text_signature = "(self, inputs:Dict[str,Tensor])")]
/// Run the model on the given inputs.
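    ///
    /// A sketch (the path and the input name depend on the model's graph):
    ///
    /// ```python
    /// model = candle.onnx.ONNXModel("model.onnx")
    /// outputs = model.run({"input": candle.rand(1, 3, 224, 224)})
    /// ```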
/// &RETURNS&: Dict[str,Tensor]
fn run(&self, inputs: HashMap<String, PyTensor>) -> PyResult<HashMap<String, PyTensor>> {
let unwrapped_tensors = inputs.into_iter().map(|(k, v)| (k.clone(), v.0)).collect();
let result = simple_eval(&self.0, unwrapped_tensors).map_err(wrap_err)?;
Ok(result
.into_iter()
.map(|(k, v)| (k.clone(), PyTensor(v)))
.collect())
}
}
| 6 |
0 | hf_public_repos/candle/candle-pyo3 | hf_public_repos/candle/candle-pyo3/src/shape.rs | use ::candle::Tensor;
use pyo3::prelude::*;
#[derive(Clone, Debug)]
/// Represents an absolute shape e.g. (1, 2, 3)
pub struct PyShape(Vec<usize>);
impl<'source> pyo3::FromPyObject<'source> for PyShape {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
if ob.is_none() {
return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
"Shape cannot be None",
));
}
let tuple = ob.downcast::<pyo3::types::PyTuple>()?;
if tuple.len() == 1 {
let first_element = tuple.get_item(0)?;
let dims: Vec<usize> = pyo3::FromPyObject::extract_bound(&first_element)?;
Ok(PyShape(dims))
} else {
let dims: Vec<usize> = pyo3::FromPyObject::extract_bound(tuple)?;
Ok(PyShape(dims))
}
}
}
impl From<PyShape> for ::candle::Shape {
fn from(val: PyShape) -> Self {
val.0.into()
}
}
#[derive(Clone, Debug)]
/// Represents a shape with a hole in it e.g. (1, -1, 3)
pub struct PyShapeWithHole(Vec<isize>);
impl<'source> pyo3::FromPyObject<'source> for PyShapeWithHole {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
if ob.is_none() {
return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
"Shape cannot be None",
));
}
let tuple = ob.downcast::<pyo3::types::PyTuple>()?;
let dims: Vec<isize> = if tuple.len() == 1 {
let first_element = tuple.get_item(0)?;
pyo3::FromPyObject::extract_bound(&first_element)?
} else {
pyo3::FromPyObject::extract_bound(tuple)?
};
// Ensure we have only positive numbers and at most one "hole" (-1)
let negative_ones = dims.iter().filter(|&&x| x == -1).count();
let any_invalid_dimensions = dims.iter().any(|&x| x < -1 || x == 0);
if negative_ones > 1 || any_invalid_dimensions {
return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(format!(
"Invalid dimension in shape: {:?}",
dims
)));
}
Ok(PyShapeWithHole(dims))
}
}
impl PyShapeWithHole {
/// Returns `true` if the shape is absolute e.g. (1, 2, 3)
pub fn is_absolute(&self) -> bool {
self.0.iter().all(|x| *x > 0)
}
/// Convert a relative shape to an absolute shape e.g. (1, -1) -> (1, 12)
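    ///
    /// For a tensor with 24 elements, `(2, 3, -1)` resolves the hole to
    /// `24 / (2 * 3) = 4`, giving the absolute shape `(2, 3, 4)`.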
pub fn to_absolute(&self, t: &Tensor) -> PyResult<PyShape> {
if self.is_absolute() {
return Ok(PyShape(
self.0.iter().map(|x| *x as usize).collect::<Vec<usize>>(),
));
}
        let mut elements = t.elem_count();
        let mut new_dims: Vec<usize> = vec![];
        // Divide out all of the known dimensions first so that the hole can
        // appear at any position in the shape, not only as the last entry.
        for dim in self.0.iter() {
            if *dim > 0 {
                elements /= *dim as usize;
            }
        }
        for dim in self.0.iter() {
            if *dim > 0 {
                new_dims.push(*dim as usize);
            } else if *dim == -1 {
                new_dims.push(elements);
            } else {
                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(format!(
                    "Invalid dimension in shape: {}",
                    dim
                )));
            }
        }
Ok(PyShape(new_dims))
}
}
| 7 |
0 | hf_public_repos/candle/candle-pyo3 | hf_public_repos/candle/candle-pyo3/src/utils.rs | use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
pub fn wrap_err(err: ::candle::Error) -> PyErr {
PyErr::new::<PyValueError, _>(format!("{err:?}"))
}
| 8 |
0 | hf_public_repos/candle/candle-pyo3/py_src | hf_public_repos/candle/candle-pyo3/py_src/candle/__init__.pyi | # Generated content DO NOT EDIT
from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
from os import PathLike
from candle.typing import _ArrayLike, Device, Scalar, Index, Shape
class bf16(DType):
pass
@staticmethod
def cat(tensors: List[Tensor], dim: int) -> Tensor:
"""
Concatenate the tensors across one axis.
"""
pass
class f16(DType):
pass
class f32(DType):
pass
class f64(DType):
pass
class i64(DType):
pass
@staticmethod
def ones(*shape: Shape, dtype: Optional[DType] = None, device: Optional[Device] = None) -> Tensor:
"""
Creates a new tensor filled with ones.
"""
pass
@staticmethod
def rand(*shape: Shape, device: Optional[Device] = None) -> Tensor:
"""
Creates a new tensor with random values.
"""
pass
@staticmethod
def randn(*shape: Shape, device: Optional[Device] = None) -> Tensor:
"""
Creates a new tensor with random values from a normal distribution.
"""
pass
@staticmethod
def stack(tensors: List[Tensor], dim: int) -> Tensor:
"""
Stack the tensors along a new axis.
"""
pass
@staticmethod
def tensor(data: _ArrayLike) -> Tensor:
"""
Creates a new tensor from a Python value. The value can be a scalar or array-like object.
"""
pass
class u32(DType):
pass
class u8(DType):
pass
@staticmethod
def zeros(*shape: Shape, dtype: Optional[DType] = None, device: Optional[Device] = None) -> Tensor:
"""
Creates a new tensor filled with zeros.
"""
pass
class DType:
"""
A `candle` dtype.
"""
class QTensor:
"""
A quantized tensor.
"""
def dequantize(self) -> Tensor:
"""
Dequantizes the tensor.
"""
pass
@property
def ggml_dtype(self) -> str:
"""
        Gets the tensor's quantized dtype.
"""
pass
def matmul_t(self, lhs: Tensor) -> Tensor:
"""
Performs a quantized matrix multiplication, with the quantized tensor as the right hand side.
"""
pass
@property
def rank(self) -> int:
"""
Gets the rank of the tensor.
"""
pass
@property
def shape(self) -> Tuple[int]:
"""
Gets the shape of the tensor.
"""
pass
class Tensor:
"""
A `candle` tensor.
"""
def __init__(self, data: _ArrayLike):
pass
def __add__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Add a scalar to a tensor or two tensors together.
"""
pass
def __eq__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __ge__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __getitem__(self, index: Union[Index, Tensor, Sequence[Index]]) -> "Tensor":
"""
Return a slice of a tensor.
"""
pass
def __gt__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __le__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __lt__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __mul__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Multiply a tensor by a scalar or one tensor by another.
"""
pass
def __ne__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __radd__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Add a scalar to a tensor or two tensors together.
"""
pass
def __richcmp__(self, rhs: Union[Tensor, Scalar], op) -> "Tensor":
"""
Compare a tensor with a scalar or one tensor with another.
"""
pass
def __rmul__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Multiply a tensor by a scalar or one tensor by another.
"""
pass
def __sub__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Subtract a scalar from a tensor or one tensor from another.
"""
pass
def __truediv__(self, rhs: Union[Tensor, Scalar]) -> "Tensor":
"""
Divide a tensor by a scalar or one tensor by another.
"""
pass
def abs(self) -> Tensor:
"""
Performs the `abs` operation on the tensor.
"""
pass
def argmax_keepdim(self, dim: int) -> Tensor:
"""
Returns the indices of the maximum value(s) across the selected dimension.
"""
pass
def argmin_keepdim(self, dim: int) -> Tensor:
"""
Returns the indices of the minimum value(s) across the selected dimension.
"""
pass
def broadcast_add(self, rhs: Tensor) -> Tensor:
"""
Adds the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
"""
pass
def broadcast_as(self, *shape: Shape) -> Tensor:
"""
Broadcasts the tensor to the given shape.
"""
pass
def broadcast_div(self, rhs: Tensor) -> Tensor:
"""
Divides the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
"""
pass
def broadcast_left(self, *shape: Shape) -> Tensor:
"""
Broadcasts the tensor to the given shape, adding new dimensions on the left.
"""
pass
def broadcast_mul(self, rhs: Tensor) -> Tensor:
"""
Multiplies the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
"""
pass
def broadcast_sub(self, rhs: Tensor) -> Tensor:
"""
Subtracts the two tensors, while broadcasting the right-hand-side tensor to match the shape of the left-hand-side tensor.
"""
pass
def contiguous(self) -> Tensor:
"""
Makes the tensor contiguous in memory.
"""
pass
def copy(self) -> Tensor:
"""
Returns a copy of the tensor.
"""
pass
def cos(self) -> Tensor:
"""
Performs the `cos` operation on the tensor.
"""
pass
def detach(self) -> Tensor:
"""
Detach the tensor from the computation graph.
"""
pass
@property
def device(self) -> Device:
"""
Gets the tensor's device.
"""
pass
@property
def dtype(self) -> DType:
"""
Gets the tensor's dtype.
"""
pass
def exp(self) -> Tensor:
"""
Performs the `exp` operation on the tensor.
"""
pass
def flatten_all(self) -> Tensor:
"""
Flattens the tensor into a 1D tensor.
"""
pass
def flatten_from(self, dim: int) -> Tensor:
"""
Flattens the tensor on the dimension indexes from `dim` (inclusive) to the last dimension.
"""
pass
def flatten_to(self, dim: int) -> Tensor:
"""
Flattens the tensor on the dimension indexes from `0` to `dim` (inclusive).
"""
pass
    def gather(self, index: Tensor, dim: int) -> Tensor:
"""
Gathers values along an axis specified by dim.
"""
pass
def get(self, index: int) -> Tensor:
"""
Gets the value at the specified index.
"""
pass
def index_select(self, rhs: Tensor, dim: int) -> Tensor:
"""
Select values for the input tensor at the target indexes across the specified dimension.
        The `indexes` argument is an int tensor with a single dimension.
        The output has the same number of dimensions as the `self` input. The target dimension of
        the output has the same length as `indexes` and the values are taken from `self` using
        the index from `indexes`. Other dimensions have the same number of elements as the input
        tensor.
"""
pass
def is_contiguous(self) -> bool:
"""
Returns true if the tensor is contiguous in C order.
"""
pass
def is_fortran_contiguous(self) -> bool:
"""
Returns true if the tensor is contiguous in Fortran order.
"""
pass
def log(self) -> Tensor:
"""
Performs the `log` operation on the tensor.
"""
pass
def matmul(self, rhs: Tensor) -> Tensor:
"""
Performs a matrix multiplication between the two tensors.
"""
pass
def max_keepdim(self, dim: int) -> Tensor:
"""
Gathers the maximum value across the selected dimension.
"""
pass
def mean_all(self) -> Tensor:
"""
Returns the mean of the tensor.
"""
pass
def min_keepdim(self, dim: int) -> Tensor:
"""
Gathers the minimum value across the selected dimension.
"""
pass
def narrow(self, dim: int, start: int, len: int) -> Tensor:
"""
Returns a new tensor that is a narrowed version of the input, the dimension `dim`
ranges from `start` to `start + len`.
"""
pass
@property
def nelement(self) -> int:
"""
Gets the tensor's element count.
"""
pass
def powf(self, p: float) -> Tensor:
"""
Performs the `pow` operation on the tensor with the given exponent.
"""
pass
def quantize(self, quantized_dtype: str) -> QTensor:
"""
Quantize the tensor.
"""
pass
@property
def rank(self) -> int:
"""
Gets the tensor's rank.
"""
pass
def recip(self) -> Tensor:
"""
Get the `recip` of the tensor.
"""
pass
def reshape(self, *shape: Shape) -> Tensor:
"""
Reshapes the tensor to the given shape.
"""
pass
@property
def shape(self) -> Tuple[int]:
"""
Gets the tensor's shape.
"""
pass
def sin(self) -> Tensor:
"""
Performs the `sin` operation on the tensor.
"""
pass
def sqr(self) -> Tensor:
"""
Squares the tensor.
"""
pass
def sqrt(self) -> Tensor:
"""
Calculates the square root of the tensor.
"""
pass
def squeeze(self, dim: int) -> Tensor:
"""
Creates a new tensor with the specified dimension removed if its size was one.
"""
pass
@property
def stride(self) -> Tuple[int]:
"""
Gets the tensor's strides.
"""
pass
def sum_all(self) -> Tensor:
"""
Returns the sum of the tensor.
"""
pass
def sum_keepdim(self, dim: Union[int, List[int]]) -> Tensor:
"""
Returns the sum of all elements in the input tensor. The sum is performed over all the input dimensions.
"""
pass
def t(self) -> Tensor:
"""
Transposes the tensor.
"""
pass
def to(self, *args, **kwargs) -> Tensor:
"""
Performs Tensor dtype and/or device conversion.
"""
pass
def to_device(self, device: Union[str, Device]) -> Tensor:
"""
Move the tensor to a new device.
"""
pass
def to_dtype(self, dtype: Union[str, DType]) -> Tensor:
"""
Convert the tensor to a new dtype.
"""
pass
def to_torch(self) -> torch.Tensor:
"""
        Converts candle's tensor to a PyTorch tensor.
"""
pass
def transpose(self, dim1: int, dim2: int) -> Tensor:
"""
        Returns a tensor that is a transposed version of the input; the given dimensions are swapped.
"""
pass
def unsqueeze(self, dim: int) -> Tensor:
"""
Creates a new tensor with a dimension of size one inserted at the specified position.
"""
pass
def values(self) -> _ArrayLike:
"""
Gets the tensor's data as a Python scalar or array-like object.
"""
pass
def where_cond(self, on_true: Tensor, on_false: Tensor) -> Tensor:
"""
        Returns a tensor with the same shape as the input tensor; the values are taken from
        `on_true` where the input tensor value is non-zero, and from `on_false` where the
        input tensor is zero.
"""
pass
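    # --- Illustrative usage (not part of the stub) ---
    # A minimal sketch of how the methods documented above compose, assuming a
    # `candle` module exposing this Tensor class; the constructor call shown
    # here is an assumption for illustration, not a verified signature.
    #
    #   import candle
    #   t = candle.Tensor([[1.0, 2.0], [3.0, 4.0]])
    #   print(t.shape)            # (2, 2)
    #   s = t.sum_keepdim(1)      # shape (2, 1): dim 1 reduced but kept
    #   m = t.matmul(t.t())       # (2, 2) @ (2, 2) -> (2, 2)
    #   f = t.flatten_all()       # shape (4,)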
| 9 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/text_regression/params.py | from typing import Optional
from pydantic import Field
from autotrain.trainers.common import AutoTrainParams
class TextRegressionParams(AutoTrainParams):
"""
TextRegressionParams is a configuration class for setting up text regression training parameters.
Attributes:
data_path (str): Path to the dataset.
model (str): Name of the pre-trained model to use. Default is "bert-base-uncased".
lr (float): Learning rate for the optimizer. Default is 5e-5.
epochs (int): Number of training epochs. Default is 3.
max_seq_length (int): Maximum sequence length for the inputs. Default is 128.
batch_size (int): Batch size for training. Default is 8.
warmup_ratio (float): Proportion of training to perform learning rate warmup. Default is 0.1.
gradient_accumulation (int): Number of steps to accumulate gradients before updating. Default is 1.
optimizer (str): Optimizer to use. Default is "adamw_torch".
scheduler (str): Learning rate scheduler to use. Default is "linear".
weight_decay (float): Weight decay to apply. Default is 0.0.
max_grad_norm (float): Maximum norm for the gradients. Default is 1.0.
seed (int): Random seed for reproducibility. Default is 42.
train_split (str): Name of the training data split. Default is "train".
valid_split (Optional[str]): Name of the validation data split. Default is None.
text_column (str): Name of the column containing text data. Default is "text".
target_column (str): Name of the column containing target data. Default is "target".
logging_steps (int): Number of steps between logging. Default is -1 (no logging).
project_name (str): Name of the project for output directory. Default is "project-name".
auto_find_batch_size (bool): Whether to automatically find the batch size. Default is False.
mixed_precision (Optional[str]): Mixed precision training mode (fp16, bf16, or None). Default is None.
save_total_limit (int): Maximum number of checkpoints to save. Default is 1.
token (Optional[str]): Token for accessing Hugging Face Hub. Default is None.
push_to_hub (bool): Whether to push the model to Hugging Face Hub. Default is False.
eval_strategy (str): Evaluation strategy to use. Default is "epoch".
username (Optional[str]): Hugging Face username. Default is None.
log (str): Logging method for experiment tracking. Default is "none".
early_stopping_patience (int): Number of epochs with no improvement after which training will be stopped. Default is 5.
early_stopping_threshold (float): Threshold for measuring the new optimum, to qualify as an improvement. Default is 0.01.
"""
data_path: str = Field(None, title="Data path")
model: str = Field("bert-base-uncased", title="Model name")
lr: float = Field(5e-5, title="Learning rate")
epochs: int = Field(3, title="Number of training epochs")
max_seq_length: int = Field(128, title="Max sequence length")
batch_size: int = Field(8, title="Training batch size")
warmup_ratio: float = Field(0.1, title="Warmup proportion")
gradient_accumulation: int = Field(1, title="Gradient accumulation steps")
optimizer: str = Field("adamw_torch", title="Optimizer")
scheduler: str = Field("linear", title="Scheduler")
weight_decay: float = Field(0.0, title="Weight decay")
max_grad_norm: float = Field(1.0, title="Max gradient norm")
seed: int = Field(42, title="Seed")
train_split: str = Field("train", title="Train split")
valid_split: Optional[str] = Field(None, title="Validation split")
text_column: str = Field("text", title="Text column")
target_column: str = Field("target", title="Target column(s)")
logging_steps: int = Field(-1, title="Logging steps")
project_name: str = Field("project-name", title="Output directory")
auto_find_batch_size: bool = Field(False, title="Auto find batch size")
mixed_precision: Optional[str] = Field(None, title="fp16, bf16, or None")
save_total_limit: int = Field(1, title="Save total limit")
token: Optional[str] = Field(None, title="Hub Token")
push_to_hub: bool = Field(False, title="Push to hub")
eval_strategy: str = Field("epoch", title="Evaluation strategy")
username: Optional[str] = Field(None, title="Hugging Face Username")
log: str = Field("none", title="Logging using experiment tracking")
early_stopping_patience: int = Field(5, title="Early stopping patience")
early_stopping_threshold: float = Field(0.01, title="Early stopping threshold")
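# A minimal sketch of constructing these params directly; the dataset path and
# column names below are hypothetical placeholders, not defaults of this class.
def _example_text_regression_params() -> "TextRegressionParams":
    # Not invoked anywhere; shown only to illustrate overriding the defaults.
    return TextRegressionParams(
        data_path="data/",  # hypothetical local folder
        model="bert-base-uncased",
        lr=3e-5,
        epochs=5,
        text_column="review",  # hypothetical text column
        target_column="score",  # hypothetical numeric target column
    )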
| 0 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/token_classification/utils.py | import os
import numpy as np
from seqeval import metrics
MODEL_CARD = """
---
tags:
- autotrain
- token-classification{base_model}
widget:
- text: "I love AutoTrain"{dataset_tag}
---
# Model Trained Using AutoTrain
- Problem type: Token Classification
## Validation Metrics
{validation_metrics}
"""
def token_classification_metrics(pred, label_list):
"""
Compute token classification metrics including precision, recall, F1 score, and accuracy.
Args:
pred (tuple): A tuple containing predictions and labels.
Predictions should be a 3D array (batch_size, sequence_length, num_labels).
Labels should be a 2D array (batch_size, sequence_length).
label_list (list): A list of label names corresponding to the indices used in predictions and labels.
Returns:
dict: A dictionary containing the following metrics:
- "precision": Precision score of the token classification.
- "recall": Recall score of the token classification.
- "f1": F1 score of the token classification.
- "accuracy": Accuracy score of the token classification.
"""
predictions, labels = pred
predictions = np.argmax(predictions, axis=2)
true_predictions = [
[label_list[predi] for (predi, lbl) in zip(prediction, label) if lbl != -100]
for prediction, label in zip(predictions, labels)
]
true_labels = [
[label_list[lbl] for (predi, lbl) in zip(prediction, label) if lbl != -100]
for prediction, label in zip(predictions, labels)
]
results = {
"precision": metrics.precision_score(true_labels, true_predictions),
"recall": metrics.recall_score(true_labels, true_predictions),
"f1": metrics.f1_score(true_labels, true_predictions),
"accuracy": metrics.accuracy_score(true_labels, true_predictions),
}
return results
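# A minimal sketch of calling the metric function on toy inputs; the label
# names and shapes are illustrative. -100 marks positions that the tokenizer
# alignment step tells the loss (and these metrics) to ignore.
def _example_token_classification_metrics() -> dict:
    label_list = ["O", "B-PER", "I-PER"]
    # Logits of shape (batch=1, seq_len=3, num_labels=3) and aligned labels.
    predictions = np.array([[[0.1, 0.8, 0.1], [0.7, 0.2, 0.1], [0.2, 0.2, 0.6]]])
    labels = np.array([[1, 0, -100]])  # the last position is ignored
    return token_classification_metrics((predictions, labels), label_list)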
def create_model_card(config, trainer):
"""
Generates a model card string based on the provided configuration and trainer.
Args:
config (object): Configuration object containing model and dataset information.
trainer (object): Trainer object used to evaluate the model.
Returns:
str: A formatted model card string with dataset tags, validation metrics, and base model information.
"""
if config.valid_split is not None:
eval_scores = trainer.evaluate()
valid_metrics = ["eval_loss", "eval_precision", "eval_recall", "eval_f1", "eval_accuracy"]
eval_scores = [f"{k[len('eval_'):]}: {v}" for k, v in eval_scores.items() if k in valid_metrics]
eval_scores = "\n\n".join(eval_scores)
else:
eval_scores = "No validation metrics available"
if config.data_path == f"{config.project_name}/autotrain-data" or os.path.isdir(config.data_path):
dataset_tag = ""
else:
dataset_tag = f"\ndatasets:\n- {config.data_path}"
if os.path.isdir(config.model):
base_model = ""
else:
base_model = f"\nbase_model: {config.model}"
model_card = MODEL_CARD.format(
dataset_tag=dataset_tag,
validation_metrics=eval_scores,
base_model=base_model,
)
return model_card
| 1 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/token_classification/__main__.py | import argparse
import json
from functools import partial
from accelerate.state import PartialState
from datasets import load_dataset, load_from_disk
from huggingface_hub import HfApi
from transformers import (
AutoConfig,
AutoModelForTokenClassification,
AutoTokenizer,
EarlyStoppingCallback,
Trainer,
TrainingArguments,
)
from transformers.trainer_callback import PrinterCallback
from autotrain import logger
from autotrain.trainers.common import (
ALLOW_REMOTE_CODE,
LossLoggingCallback,
TrainStartCallback,
UploadLogs,
monitor,
pause_space,
remove_autotrain_data,
save_training_params,
)
from autotrain.trainers.token_classification import utils
from autotrain.trainers.token_classification.dataset import TokenClassificationDataset
from autotrain.trainers.token_classification.params import TokenClassificationParams
def parse_args():
# get training_config.json from the end user
parser = argparse.ArgumentParser()
parser.add_argument("--training_config", type=str, required=True)
return parser.parse_args()
@monitor
def train(config):
if isinstance(config, dict):
config = TokenClassificationParams(**config)
train_data = None
valid_data = None
    # load the training split, either from disk (autotrain-prepared data) or from the Hub
if config.train_split is not None:
if config.data_path == f"{config.project_name}/autotrain-data":
logger.info("loading dataset from disk")
train_data = load_from_disk(config.data_path)[config.train_split]
else:
if ":" in config.train_split:
dataset_config_name, split = config.train_split.split(":")
train_data = load_dataset(
config.data_path,
name=dataset_config_name,
split=split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
else:
train_data = load_dataset(
config.data_path,
split=config.train_split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
if config.valid_split is not None:
if config.data_path == f"{config.project_name}/autotrain-data":
logger.info("loading dataset from disk")
valid_data = load_from_disk(config.data_path)[config.valid_split]
else:
if ":" in config.valid_split:
dataset_config_name, split = config.valid_split.split(":")
valid_data = load_dataset(
config.data_path,
name=dataset_config_name,
split=split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
else:
valid_data = load_dataset(
config.data_path,
split=config.valid_split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
label_list = train_data.features[config.tags_column].feature.names
num_classes = len(label_list)
model_config = AutoConfig.from_pretrained(config.model, num_labels=num_classes)
model_config._num_labels = num_classes
model_config.label2id = {l: i for i, l in enumerate(label_list)}
model_config.id2label = dict(enumerate(label_list))
try:
model = AutoModelForTokenClassification.from_pretrained(
config.model,
config=model_config,
trust_remote_code=ALLOW_REMOTE_CODE,
token=config.token,
ignore_mismatched_sizes=True,
)
except OSError:
model = AutoModelForTokenClassification.from_pretrained(
config.model,
config=model_config,
from_tf=True,
trust_remote_code=ALLOW_REMOTE_CODE,
token=config.token,
ignore_mismatched_sizes=True,
)
if model_config.model_type in {"bloom", "gpt2", "roberta"}:
tokenizer = AutoTokenizer.from_pretrained(
config.model, token=config.token, trust_remote_code=ALLOW_REMOTE_CODE, add_prefix_space=True
)
else:
tokenizer = AutoTokenizer.from_pretrained(
config.model, token=config.token, trust_remote_code=ALLOW_REMOTE_CODE
)
train_data = TokenClassificationDataset(data=train_data, tokenizer=tokenizer, config=config)
if config.valid_split is not None:
valid_data = TokenClassificationDataset(data=valid_data, tokenizer=tokenizer, config=config)
if config.logging_steps == -1:
if config.valid_split is not None:
logging_steps = int(0.2 * len(valid_data) / config.batch_size)
else:
logging_steps = int(0.2 * len(train_data) / config.batch_size)
if logging_steps == 0:
logging_steps = 1
if logging_steps > 25:
logging_steps = 25
config.logging_steps = logging_steps
else:
logging_steps = config.logging_steps
logger.info(f"Logging steps: {logging_steps}")
training_args = dict(
output_dir=config.project_name,
per_device_train_batch_size=config.batch_size,
per_device_eval_batch_size=2 * config.batch_size,
learning_rate=config.lr,
num_train_epochs=config.epochs,
eval_strategy=config.eval_strategy if config.valid_split is not None else "no",
logging_steps=logging_steps,
save_total_limit=config.save_total_limit,
save_strategy=config.eval_strategy if config.valid_split is not None else "no",
gradient_accumulation_steps=config.gradient_accumulation,
report_to=config.log,
auto_find_batch_size=config.auto_find_batch_size,
lr_scheduler_type=config.scheduler,
optim=config.optimizer,
warmup_ratio=config.warmup_ratio,
weight_decay=config.weight_decay,
max_grad_norm=config.max_grad_norm,
push_to_hub=False,
load_best_model_at_end=True if config.valid_split is not None else False,
ddp_find_unused_parameters=False,
)
if config.mixed_precision == "fp16":
training_args["fp16"] = True
if config.mixed_precision == "bf16":
training_args["bf16"] = True
if config.valid_split is not None:
early_stop = EarlyStoppingCallback(
early_stopping_patience=config.early_stopping_patience,
early_stopping_threshold=config.early_stopping_threshold,
)
callbacks_to_use = [early_stop]
else:
callbacks_to_use = []
callbacks_to_use.extend([UploadLogs(config=config), LossLoggingCallback(), TrainStartCallback()])
args = TrainingArguments(**training_args)
trainer_args = dict(
args=args,
model=model,
callbacks=callbacks_to_use,
compute_metrics=partial(utils.token_classification_metrics, label_list=label_list),
)
trainer = Trainer(
**trainer_args,
train_dataset=train_data,
eval_dataset=valid_data,
)
trainer.remove_callback(PrinterCallback)
trainer.train()
logger.info("Finished training, saving model...")
trainer.save_model(config.project_name)
tokenizer.save_pretrained(config.project_name)
model_card = utils.create_model_card(config, trainer)
# save model card to output directory as README.md
with open(f"{config.project_name}/README.md", "w", encoding="utf-8") as f:
f.write(model_card)
if config.push_to_hub:
if PartialState().process_index == 0:
remove_autotrain_data(config)
save_training_params(config)
logger.info("Pushing model to hub...")
api = HfApi(token=config.token)
api.create_repo(
repo_id=f"{config.username}/{config.project_name}", repo_type="model", private=True, exist_ok=True
)
api.upload_folder(
folder_path=config.project_name,
repo_id=f"{config.username}/{config.project_name}",
repo_type="model",
)
if PartialState().process_index == 0:
pause_space(config)
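# A minimal, illustrative training_config for this entry point; every value is
# a placeholder (see TokenClassificationParams for the full field list), and
# the dataset/column names are assumptions, not verified defaults.
def _example_training_config() -> dict:
    # Would typically be serialized to JSON and passed via --training_config.
    return {
        "data_path": "conll2003",  # hypothetical Hub dataset id
        "model": "bert-base-uncased",
        "project_name": "my-token-clf",  # hypothetical output directory
        "train_split": "train",
        "valid_split": "validation",
        "tokens_column": "tokens",
        "tags_column": "ner_tags",  # hypothetical tags column
        "epochs": 3,
        "batch_size": 8,
    }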
if __name__ == "__main__":
args = parse_args()
training_config = json.load(open(args.training_config))
config = TokenClassificationParams(**training_config)
train(config)
| 2 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/token_classification/dataset.py | class TokenClassificationDataset:
"""
A dataset class for token classification tasks.
Args:
data (Dataset): The dataset containing the text and tags.
tokenizer (PreTrainedTokenizer): The tokenizer to be used for tokenizing the text.
config (Config): Configuration object containing necessary parameters.
Attributes:
data (Dataset): The dataset containing the text and tags.
tokenizer (PreTrainedTokenizer): The tokenizer to be used for tokenizing the text.
config (Config): Configuration object containing necessary parameters.
Methods:
__len__():
Returns the number of samples in the dataset.
__getitem__(item):
Retrieves a tokenized sample and its corresponding labels.
Args:
item (int): The index of the sample to retrieve.
Returns:
dict: A dictionary containing tokenized text and corresponding labels.
"""
def __init__(self, data, tokenizer, config):
self.data = data
self.tokenizer = tokenizer
self.config = config
def __len__(self):
return len(self.data)
def __getitem__(self, item):
text = self.data[item][self.config.tokens_column]
tags = self.data[item][self.config.tags_column]
label_list = self.data.features[self.config.tags_column].feature.names
label_to_id = {i: i for i in range(len(label_list))}
tokenized_text = self.tokenizer(
text,
max_length=self.config.max_seq_length,
padding="max_length",
truncation=True,
is_split_into_words=True,
)
word_ids = tokenized_text.word_ids(batch_index=0)
previous_word_idx = None
label_ids = []
for word_idx in word_ids:
if word_idx is None:
label_ids.append(-100)
            elif word_idx != previous_word_idx:
                # First sub-token of a word: take the word's tag.
                label_ids.append(label_to_id[tags[word_idx]])
            else:
                # Subsequent sub-tokens: repeat the word's tag (label-all-tokens behaviour).
                label_ids.append(label_to_id[tags[word_idx]])
previous_word_idx = word_idx
tokenized_text["labels"] = label_ids
return tokenized_text
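# A minimal sketch of exercising the dataset class with an in-memory toy
# dataset; the tokenizer name, labels, and config fields are illustrative.
def _example_token_classification_dataset():
    from types import SimpleNamespace

    from datasets import ClassLabel, Dataset, Features, Sequence, Value
    from transformers import AutoTokenizer

    features = Features(
        {
            "tokens": Sequence(Value("string")),
            "tags": Sequence(ClassLabel(names=["O", "B-PER", "I-PER"])),
        }
    )
    data = Dataset.from_dict({"tokens": [["hello", "sara"]], "tags": [[0, 1]]}, features=features)
    config = SimpleNamespace(tokens_column="tokens", tags_column="tags", max_seq_length=16)
    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    ds = TokenClassificationDataset(data=data, tokenizer=tokenizer, config=config)
    return ds[0]  # dict with input_ids, attention_mask and labels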
| 3 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/token_classification/params.py | from typing import Optional
from pydantic import Field
from autotrain.trainers.common import AutoTrainParams
class TokenClassificationParams(AutoTrainParams):
"""
TokenClassificationParams is a configuration class for token classification training parameters.
Attributes:
data_path (str): Path to the dataset.
model (str): Name of the model to use. Default is "bert-base-uncased".
lr (float): Learning rate. Default is 5e-5.
epochs (int): Number of training epochs. Default is 3.
max_seq_length (int): Maximum sequence length. Default is 128.
batch_size (int): Training batch size. Default is 8.
warmup_ratio (float): Warmup proportion. Default is 0.1.
gradient_accumulation (int): Gradient accumulation steps. Default is 1.
optimizer (str): Optimizer to use. Default is "adamw_torch".
scheduler (str): Scheduler to use. Default is "linear".
weight_decay (float): Weight decay. Default is 0.0.
max_grad_norm (float): Maximum gradient norm. Default is 1.0.
seed (int): Random seed. Default is 42.
train_split (str): Name of the training split. Default is "train".
valid_split (Optional[str]): Name of the validation split. Default is None.
tokens_column (str): Name of the tokens column. Default is "tokens".
tags_column (str): Name of the tags column. Default is "tags".
logging_steps (int): Number of steps between logging. Default is -1.
project_name (str): Name of the project. Default is "project-name".
auto_find_batch_size (bool): Whether to automatically find the batch size. Default is False.
mixed_precision (Optional[str]): Mixed precision setting (fp16, bf16, or None). Default is None.
save_total_limit (int): Total number of checkpoints to save. Default is 1.
token (Optional[str]): Hub token for authentication. Default is None.
push_to_hub (bool): Whether to push the model to the Hugging Face hub. Default is False.
eval_strategy (str): Evaluation strategy. Default is "epoch".
username (Optional[str]): Hugging Face username. Default is None.
log (str): Logging method for experiment tracking. Default is "none".
early_stopping_patience (int): Patience for early stopping. Default is 5.
early_stopping_threshold (float): Threshold for early stopping. Default is 0.01.
"""
data_path: str = Field(None, title="Data path")
model: str = Field("bert-base-uncased", title="Model name")
lr: float = Field(5e-5, title="Learning rate")
epochs: int = Field(3, title="Number of training epochs")
max_seq_length: int = Field(128, title="Max sequence length")
batch_size: int = Field(8, title="Training batch size")
warmup_ratio: float = Field(0.1, title="Warmup proportion")
gradient_accumulation: int = Field(1, title="Gradient accumulation steps")
optimizer: str = Field("adamw_torch", title="Optimizer")
scheduler: str = Field("linear", title="Scheduler")
weight_decay: float = Field(0.0, title="Weight decay")
max_grad_norm: float = Field(1.0, title="Max gradient norm")
seed: int = Field(42, title="Seed")
train_split: str = Field("train", title="Train split")
valid_split: Optional[str] = Field(None, title="Validation split")
tokens_column: str = Field("tokens", title="Tokens column")
tags_column: str = Field("tags", title="Tags column")
logging_steps: int = Field(-1, title="Logging steps")
project_name: str = Field("project-name", title="Output directory")
auto_find_batch_size: bool = Field(False, title="Auto find batch size")
mixed_precision: Optional[str] = Field(None, title="fp16, bf16, or None")
save_total_limit: int = Field(1, title="Save total limit")
token: Optional[str] = Field(None, title="Hub Token")
push_to_hub: bool = Field(False, title="Push to hub")
eval_strategy: str = Field("epoch", title="Evaluation strategy")
username: Optional[str] = Field(None, title="Hugging Face Username")
log: str = Field("none", title="Logging using experiment tracking")
early_stopping_patience: int = Field(5, title="Early stopping patience")
early_stopping_threshold: float = Field(0.01, title="Early stopping threshold")
| 4 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/clm/train_clm_sft.py | from peft import LoraConfig
from transformers.trainer_callback import PrinterCallback
from trl import SFTConfig, SFTTrainer
from autotrain import logger
from autotrain.trainers.clm import utils
from autotrain.trainers.clm.params import LLMTrainingParams
def train(config):
logger.info("Starting SFT training...")
if isinstance(config, dict):
config = LLMTrainingParams(**config)
train_data, valid_data = utils.process_input_data(config)
tokenizer = utils.get_tokenizer(config)
train_data, valid_data = utils.process_data_with_chat_template(config, tokenizer, train_data, valid_data)
logging_steps = utils.configure_logging_steps(config, train_data, valid_data)
training_args = utils.configure_training_args(config, logging_steps)
config = utils.configure_block_size(config, tokenizer)
training_args["dataset_text_field"] = config.text_column
training_args["max_seq_length"] = config.block_size
training_args["packing"] = True
args = SFTConfig(**training_args)
model = utils.get_model(config, tokenizer)
if config.peft:
peft_config = LoraConfig(
r=config.lora_r,
lora_alpha=config.lora_alpha,
lora_dropout=config.lora_dropout,
bias="none",
task_type="CAUSAL_LM",
target_modules=utils.get_target_modules(config),
)
logger.info("creating trainer")
callbacks = utils.get_callbacks(config)
trainer_args = dict(
args=args,
model=model,
callbacks=callbacks,
)
trainer = SFTTrainer(
**trainer_args,
train_dataset=train_data,
eval_dataset=valid_data if config.valid_split is not None else None,
peft_config=peft_config if config.peft else None,
processing_class=tokenizer,
)
trainer.remove_callback(PrinterCallback)
trainer.train()
utils.post_training_steps(config, trainer)
| 5 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/clm/utils.py | import ast
import gc
import os
from enum import Enum
from itertools import chain
import requests
import torch
from accelerate.state import PartialState
from datasets import load_dataset, load_from_disk
from huggingface_hub import HfApi
from peft import LoraConfig, PeftModel, get_peft_model, prepare_model_for_kbit_training
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
from autotrain import is_unsloth_available, logger
from autotrain.trainers.clm.callbacks import LoadBestPeftModelCallback, SavePeftModelCallback
from autotrain.trainers.common import (
ALLOW_REMOTE_CODE,
LossLoggingCallback,
TrainStartCallback,
UploadLogs,
pause_space,
remove_autotrain_data,
save_training_params,
)
DEFAULT_CHAT_TEMPLATE = "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
CHATML_CHAT_TEMPLATE = "{% for message in messages %}\n{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% if loop.last and add_generation_prompt %}{{'<|im_start|>assistant\n' }}{% endif %}{% endfor %}"
ZEPHYR_CHAT_TEMPLATE = "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}"
IGNORE_INDEX = -100
DEFAULT_PAD_TOKEN = "[PAD]"
DEFAULT_EOS_TOKEN = "</s>"
DEFAULT_BOS_TOKEN = "</s>"
DEFAULT_UNK_TOKEN = "</s>"
TARGET_MODULES = {
"Salesforce/codegen25-7b-multi": "q_proj,k_proj,v_proj,o_proj,down_proj,up_proj,gate_proj",
}
MODEL_CARD = """
---
tags:
- autotrain
- text-generation-inference
- text-generation{peft}
library_name: transformers{base_model}
widget:
- messages:
- role: user
content: What is your favorite condiment?
license: other{dataset_tag}
---
# Model Trained Using AutoTrain
This model was trained using AutoTrain. For more information, please visit [AutoTrain](https://hf.co/docs/autotrain).
# Usage
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
model_path = "PATH_TO_THIS_REPO"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
model_path,
device_map="auto",
torch_dtype='auto'
).eval()
# Prompt content: "hi"
messages = [
{{"role": "user", "content": "hi"}}
]
input_ids = tokenizer.apply_chat_template(conversation=messages, tokenize=True, add_generation_prompt=True, return_tensors='pt')
output_ids = model.generate(input_ids.to('cuda'))
response = tokenizer.decode(output_ids[0][input_ids.shape[1]:], skip_special_tokens=True)
# Model response: "Hello! How can I assist you today?"
print(response)
```
"""
class ZephyrSpecialTokens(str, Enum):
USER = "<|user|>"
ASSISTANT = "<|assistant|>"
SYSTEM = "<|system|>"
EOS_TOKEN = "</s>"
BOS_TOKEN = "<s>"
PAD_TOKEN = "<pad>"
@classmethod
def list(cls):
return [c.value for c in cls]
class ChatmlSpecialTokens(str, Enum):
USER = "<|im_start|>user"
ASSISTANT = "<|im_start|>assistant"
SYSTEM = "<|im_start|>system"
EOS_TOKEN = "<|im_end|>"
BOS_TOKEN = "<s>"
PAD_TOKEN = "<pad>"
@classmethod
def list(cls):
return [c.value for c in cls]
def preprocess_reward(examples, tokenizer):
"""
Preprocesses the reward data by tokenizing the chosen and rejected examples.
Args:
examples (dict): A dictionary containing two keys, "chosen" and "rejected", each mapping to a list of text examples.
tokenizer (PreTrainedTokenizer): A tokenizer instance from the Hugging Face library used to tokenize the text examples.
Returns:
dict: A dictionary with the following keys:
- "input_ids_chosen": List of tokenized input IDs for the chosen examples.
- "attention_mask_chosen": List of attention masks for the chosen examples.
- "input_ids_rejected": List of tokenized input IDs for the rejected examples.
- "attention_mask_rejected": List of attention masks for the rejected examples.
"""
new_examples = {
"input_ids_chosen": [],
"attention_mask_chosen": [],
"input_ids_rejected": [],
"attention_mask_rejected": [],
}
for chosen, rejected in zip(examples["chosen"], examples["rejected"]):
tokenized_chosen = tokenizer(chosen, truncation=True)
tokenized_rejected = tokenizer(rejected, truncation=True)
new_examples["input_ids_chosen"].append(tokenized_chosen["input_ids"])
new_examples["attention_mask_chosen"].append(tokenized_chosen["attention_mask"])
new_examples["input_ids_rejected"].append(tokenized_rejected["input_ids"])
new_examples["attention_mask_rejected"].append(tokenized_rejected["attention_mask"])
return new_examples
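# A minimal sketch of mapping the reward preprocessing over a toy pairwise
# dataset; the tokenizer name and example texts are illustrative.
def _example_preprocess_reward():
    from datasets import Dataset
    from transformers import AutoTokenizer

    ds = Dataset.from_dict({"chosen": ["a helpful answer"], "rejected": ["an unhelpful answer"]})
    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    ds = ds.map(preprocess_reward, batched=True, fn_kwargs={"tokenizer": tokenizer})
    return ds[0]  # original columns plus the four tokenized reward columns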
def get_target_modules(config):
"""
Determines the target modules based on the provided configuration.
Args:
config (object): Configuration object that contains the following attributes:
- target_modules (str or None): Specifies the target modules. It can be:
- None: Returns the default target modules for the model specified in the config.
- An empty string: Returns the default target modules for the model specified in the config.
- "all-linear": Returns the string "all-linear".
- A comma-separated string: Returns a list of target modules split by commas.
Returns:
list or str: A list of target modules or a specific string ("all-linear") based on the configuration.
"""
if config.target_modules is None:
return TARGET_MODULES.get(config.model)
if config.target_modules.strip() == "":
return TARGET_MODULES.get(config.model)
if config.target_modules.strip().lower() == "all-linear":
return "all-linear"
return config.target_modules.split(",")
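# A minimal sketch of the three branches above; the config objects are
# stand-ins exposing only the attributes this function reads.
def _example_get_target_modules():
    from types import SimpleNamespace

    assert get_target_modules(SimpleNamespace(target_modules="q_proj,v_proj", model="x")) == ["q_proj", "v_proj"]
    assert get_target_modules(SimpleNamespace(target_modules="all-linear", model="x")) == "all-linear"
    # None (or an empty string) falls back to the per-model TARGET_MODULES table.
    assert get_target_modules(SimpleNamespace(target_modules=None, model="x")) is None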
def group_texts(examples, config):
"""
Groups texts into chunks of a specified block size.
Args:
examples (dict): A dictionary where keys are feature names and values are lists of lists containing text data.
config (object): A configuration object that contains the block_size attribute.
Returns:
dict: A dictionary with the same keys as the input examples, where each value is a list of chunks of text data.
Additionally, a "labels" key is added with the same value as the "input_ids" key.
"""
# Concatenate all texts.
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
total_length = len(concatenated_examples[list(examples.keys())[0]])
    # We drop the small remainder. We could add padding instead if the model supported it;
    # you can customize this part to your needs.
if total_length >= config.block_size:
total_length = (total_length // config.block_size) * config.block_size
else:
total_length = 0
# Split by chunks of max_len.
result = {
k: [t[i : i + config.block_size] for i in range(0, total_length, config.block_size)]
for k, t in concatenated_examples.items()
}
result["labels"] = result["input_ids"].copy()
return result
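# A minimal sketch of the chunking behaviour with a toy block size; the config
# object is a stand-in exposing only `block_size`.
def _example_group_texts():
    from types import SimpleNamespace

    examples = {"input_ids": [[1, 2, 3], [4, 5, 6, 7]]}  # two tokenized rows
    # The concatenated stream [1..7] is cut into 2-token blocks; token 7 is dropped.
    return group_texts(examples, SimpleNamespace(block_size=2))
    # -> {"input_ids": [[1, 2], [3, 4], [5, 6]], "labels": [[1, 2], [3, 4], [5, 6]]}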
def tokenize(examples, tokenizer, config):
"""
Tokenizes the input examples using the provided tokenizer and configuration.
Args:
examples (dict): A dictionary containing the input examples to be tokenized.
tokenizer (PreTrainedTokenizer): The tokenizer to be used for tokenizing the examples.
config (object): Configuration object that contains the text column name.
Returns:
dict: A dictionary containing the tokenized output.
"""
output = tokenizer(examples[config.text_column])
return output
def merge_adapter(base_model_path, target_model_path, adapter_path):
"""
Merges an adapter into a base model and saves the resulting model and tokenizer.
Args:
base_model_path (str): Path to the base model directory.
target_model_path (str): Path to the directory where the merged model and tokenizer will be saved.
adapter_path (str): Path to the adapter model directory.
Raises:
RuntimeError: If resizing token embeddings fails without padding to a multiple of 8.
"""
logger.info("Loading adapter...")
model = AutoModelForCausalLM.from_pretrained(
base_model_path,
torch_dtype=torch.float16,
low_cpu_mem_usage=True,
trust_remote_code=ALLOW_REMOTE_CODE,
)
tokenizer = AutoTokenizer.from_pretrained(
target_model_path,
trust_remote_code=ALLOW_REMOTE_CODE,
)
try:
model.resize_token_embeddings(len(tokenizer))
model = PeftModel.from_pretrained(model, adapter_path)
except RuntimeError:
model.resize_token_embeddings(len(tokenizer), pad_to_multiple_of=8)
model = PeftModel.from_pretrained(model, adapter_path)
model = model.merge_and_unload()
logger.info("Saving target model...")
model.save_pretrained(target_model_path)
tokenizer.save_pretrained(target_model_path)
def create_model_card(config):
"""
Generates a model card string based on the provided configuration.
Args:
config (object): Configuration object with the following attributes:
- peft (bool): Indicates if PEFT (Parameter-Efficient Fine-Tuning) is used.
- data_path (str): Path to the dataset.
- project_name (str): Name of the project.
- model (str): Path or identifier of the model.
Returns:
str: A formatted model card string.
"""
if config.peft:
peft = "\n- peft"
else:
peft = ""
if config.data_path == f"{config.project_name}/autotrain-data" or os.path.isdir(config.data_path):
dataset_tag = ""
else:
dataset_tag = f"\ndatasets:\n- {config.data_path}"
if os.path.isdir(config.model):
base_model = ""
else:
base_model = f"\nbase_model: {config.model}"
model_card = MODEL_CARD.format(
dataset_tag=dataset_tag,
peft=peft,
base_model=base_model,
)
return model_card.strip()
def pause_endpoint(params):
"""
Pauses a Hugging Face endpoint using the provided parameters.
Args:
params (object): An object containing the necessary parameters, including:
- token (str): The authorization token to access the Hugging Face API.
Returns:
dict: The JSON response from the API call.
Raises:
KeyError: If the "ENDPOINT_ID" environment variable is not set.
requests.exceptions.RequestException: If there is an issue with the API request.
"""
endpoint_id = os.environ["ENDPOINT_ID"]
username = endpoint_id.split("/")[0]
project_name = endpoint_id.split("/")[1]
api_url = f"https://api.endpoints.huggingface.cloud/v2/endpoint/{username}/{project_name}/pause"
headers = {"Authorization": f"Bearer {params.token}"}
r = requests.post(api_url, headers=headers, timeout=30)
return r.json()
def apply_chat_template(
example,
tokenizer,
config,
):
"""
Applies a chat template to the given example based on the specified configuration.
Args:
example (dict): The input example containing the text data to be processed.
tokenizer (object): The tokenizer to be used for applying the chat template.
config (object): Configuration object containing the following attributes:
- trainer (str): Specifies the type of trainer. Can be "default", "sft", "reward", "dpo", or "orpo".
- text_column (str): The key in the example dict that contains the text data.
- chat_template (str): Specifies the chat template to be used. Relevant for "reward" and "dpo" trainers.
Returns:
dict: The modified example with the chat template applied.
Raises:
ValueError: If the required keys are not found in the example for "reward", "dpo", or "orpo" trainers.
"""
# kudos to Hugging Face H4 Team for this snippet
if config.trainer in ("default", "sft"):
messages = example[config.text_column]
if isinstance(messages, str):
messages = ast.literal_eval(messages)
example[config.text_column] = tokenizer.apply_chat_template(
messages, tokenize=False, add_generation_prompt=False
)
elif config.trainer == "reward":
if all(k in example.keys() for k in ("chosen", "rejected")):
chosen_messages = example["chosen"]
rejected_messages = example["rejected"]
if isinstance(chosen_messages, str):
chosen_messages = ast.literal_eval(chosen_messages)
if isinstance(rejected_messages, str):
rejected_messages = ast.literal_eval(rejected_messages)
if config.chat_template == "zephyr" and chosen_messages[0]["role"] != "system":
chosen_messages.insert(0, {"role": "system", "content": ""})
if config.chat_template == "zephyr" and rejected_messages[0]["role"] != "system":
rejected_messages.insert(0, {"role": "system", "content": ""})
example["chosen"] = tokenizer.apply_chat_template(chosen_messages, tokenize=False)
example["rejected"] = tokenizer.apply_chat_template(rejected_messages, tokenize=False)
else:
raise ValueError(
f"Could not format example as dialogue for `rm/orpo` task! Require `[chosen, rejected]` keys but found {list(example.keys())}"
)
elif config.trainer in ("dpo", "orpo"):
if all(k in example.keys() for k in ("chosen", "rejected")):
# For DPO, the inputs are triples of (prompt, chosen, rejected), where `chosen` and `rejected` are the final turn of a dialogue
# We therefore need to extract the N-1 turns to form the prompt
if isinstance(example["chosen"], str):
example["chosen"] = ast.literal_eval(example["chosen"])
if isinstance(example["rejected"], str):
example["rejected"] = ast.literal_eval(example["rejected"])
prompt_messages = example["chosen"][:-1]
if config.chat_template == "zephyr" and example["chosen"][0]["role"] != "system":
prompt_messages.insert(0, {"role": "system", "content": ""})
chosen_messages = example["chosen"][-1:]
rejected_messages = example["rejected"][-1:]
example["chosen"] = tokenizer.apply_chat_template(chosen_messages, tokenize=False)
example["rejected"] = tokenizer.apply_chat_template(rejected_messages, tokenize=False)
example["prompt"] = tokenizer.apply_chat_template(prompt_messages, tokenize=False)
else:
raise ValueError(
f"Could not format example as dialogue for `dpo` task! Require `[chosen, rejected]` keys but found {list(example.keys())}"
)
return example
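# A minimal sketch of the "sft" path above; the tokenizer choice and message
# content are illustrative, and the template is assigned explicitly the same
# way get_tokenizer does when a tokenizer ships without one.
def _example_apply_chat_template():
    from types import SimpleNamespace

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("gpt2")
    tokenizer.chat_template = DEFAULT_CHAT_TEMPLATE
    config = SimpleNamespace(trainer="sft", text_column="text", chat_template="tokenizer")
    example = {"text": [{"role": "user", "content": "hi"}]}
    return apply_chat_template(example, tokenizer, config)["text"]
    # -> "<|im_start|>user\nhi<|im_end|>\n"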
def post_training_steps(config, trainer):
"""
Perform post-training steps including saving the model, creating a model card, merging adapter weights,
and optionally pushing the model to the Hugging Face Hub.
Args:
config (object): Configuration object containing various settings and parameters.
trainer (object): Trainer object used for training the model.
Steps:
1. Save the trained model and set `use_cache` to True.
2. Create a model card and save it as README.md in the output directory.
3. If PEFT (Parameter-Efficient Fine-Tuning) and adapter merging are enabled:
- Delete the trainer object and clear CUDA cache.
- Merge adapter weights into the base model.
- Remove adapter weight files from the output directory.
4. If pushing to the Hugging Face Hub is enabled:
- Remove training data folder.
- Push the model to the Hugging Face Hub repository.
5. Pause the space if the process index is 0.
Raises:
Exception: If merging adapter weights fails.
"""
logger.info("Finished training, saving model...")
trainer.model.config.use_cache = True
trainer.save_model(config.project_name)
model_card = create_model_card(config)
# save model card to output directory as README.md
with open(f"{config.project_name}/README.md", "w", encoding="utf-8") as f:
f.write(model_card)
if config.peft and config.merge_adapter:
del trainer
gc.collect()
torch.cuda.empty_cache()
logger.info("Merging adapter weights...")
try:
merge_adapter(
base_model_path=config.model,
target_model_path=config.project_name,
adapter_path=config.project_name,
)
# remove adapter weights: adapter_*
for file in os.listdir(config.project_name):
if file.startswith("adapter_"):
os.remove(f"{config.project_name}/{file}")
except Exception as e:
logger.warning(f"Failed to merge adapter weights: {e}")
logger.warning("Skipping adapter merge. Only adapter weights will be saved.")
if config.push_to_hub:
if PartialState().process_index == 0:
# remove data folder
remove_autotrain_data(config)
logger.info("Pushing model to hub...")
save_training_params(config)
api = HfApi(token=config.token)
api.create_repo(
repo_id=f"{config.username}/{config.project_name}", repo_type="model", private=True, exist_ok=True
)
api.upload_folder(
folder_path=config.project_name,
repo_id=f"{config.username}/{config.project_name}",
repo_type="model",
)
if PartialState().process_index == 0:
pause_space(config)
def process_input_data(config):
"""
Processes input data based on the provided configuration.
Args:
config (object): Configuration object containing the following attributes:
- data_path (str): Path to the dataset.
- project_name (str): Name of the project.
- train_split (str): Split name for training data.
- valid_split (str, optional): Split name for validation data.
- token (str, optional): Token for accessing the dataset.
- text_column (str): Name of the text column.
- rejected_text_column (str): Name of the rejected text column.
- prompt_text_column (str): Name of the prompt text column.
- trainer (str): Type of trainer (e.g., "dpo", "reward", "orpo").
Returns:
tuple: A tuple containing:
- train_data (Dataset): Processed training dataset.
- valid_data (Dataset or None): Processed validation dataset if valid_split is provided, otherwise None.
"""
if config.data_path == f"{config.project_name}/autotrain-data":
logger.info("loading dataset from disk")
train_data = load_from_disk(config.data_path)[config.train_split]
else:
if ":" in config.train_split:
dataset_config_name, split = config.train_split.split(":")
train_data = load_dataset(
config.data_path,
name=dataset_config_name,
split=split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
else:
train_data = load_dataset(
config.data_path,
split=config.train_split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
# rename columns for reward trainer
if config.trainer in ("dpo", "reward", "orpo"):
if not (config.text_column == "chosen" and config.text_column in train_data.column_names):
train_data = train_data.rename_column(config.text_column, "chosen")
if not (config.rejected_text_column == "rejected" and config.rejected_text_column in train_data.column_names):
train_data = train_data.rename_column(config.rejected_text_column, "rejected")
if config.trainer in ("dpo", "orpo"):
if not (config.prompt_text_column == "prompt" and config.prompt_text_column in train_data.column_names):
train_data = train_data.rename_column(config.prompt_text_column, "prompt")
if config.valid_split is not None:
if config.data_path == f"{config.project_name}/autotrain-data":
valid_data = load_from_disk(config.data_path)[config.valid_split]
else:
if ":" in config.valid_split:
dataset_config_name, split = config.valid_split.split(":")
valid_data = load_dataset(
config.data_path,
name=dataset_config_name,
split=split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
else:
valid_data = load_dataset(
config.data_path,
split=config.valid_split,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
if config.trainer in ("dpo", "reward", "orpo"):
if not (config.text_column == "chosen" and config.text_column in valid_data.column_names):
valid_data = valid_data.rename_column(config.text_column, "chosen")
if not (
config.rejected_text_column == "rejected" and config.rejected_text_column in valid_data.column_names
):
valid_data = valid_data.rename_column(config.rejected_text_column, "rejected")
            if config.trainer in ("dpo", "orpo"):  # mirror the train-split handling: only dpo/orpo need a prompt column
if not (config.prompt_text_column == "prompt" and config.prompt_text_column in valid_data.column_names):
valid_data = valid_data.rename_column(config.prompt_text_column, "prompt")
else:
valid_data = None
logger.info(f"Train data: {train_data}")
logger.info(f"Valid data: {valid_data}")
return train_data, valid_data
def get_tokenizer(config):
"""
Initializes and returns a tokenizer based on the provided configuration.
Args:
config (object): Configuration object containing the following attributes:
- chat_template (str): The chat template type, either "chatml" or "zephyr".
- model (str): The model identifier to load the tokenizer from.
- token (str): The token to use for the tokenizer.
- model_max_length (int): The maximum length of the model.
- padding (str): The padding side, either "left" or "right".
Returns:
tokenizer (PreTrainedTokenizer): The initialized tokenizer with the specified configuration.
"""
special_tokens = None
chat_template = None
if config.chat_template == "chatml":
special_tokens = ChatmlSpecialTokens
chat_template = CHATML_CHAT_TEMPLATE
elif config.chat_template == "zephyr":
special_tokens = ZephyrSpecialTokens
chat_template = ZEPHYR_CHAT_TEMPLATE
if special_tokens is not None:
tokenizer = AutoTokenizer.from_pretrained(
config.model,
pad_token=special_tokens.PAD_TOKEN.value,
bos_token=special_tokens.BOS_TOKEN.value,
eos_token=special_tokens.EOS_TOKEN.value,
additional_special_tokens=special_tokens.list(),
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
tokenizer.chat_template = chat_template
else:
tokenizer = AutoTokenizer.from_pretrained(
config.model, token=config.token, trust_remote_code=ALLOW_REMOTE_CODE
)
if tokenizer.chat_template is None:
tokenizer.chat_template = DEFAULT_CHAT_TEMPLATE
if tokenizer.model_max_length > 2048:
tokenizer.model_max_length = config.model_max_length
if getattr(tokenizer, "pad_token", None) is None:
tokenizer.pad_token = tokenizer.eos_token
if getattr(tokenizer, "pad_token_id", None) is None:
tokenizer.pad_token_id = tokenizer.eos_token_id
if config.padding in ("left", "right"):
tokenizer.padding_side = config.padding
return tokenizer
def process_data_with_chat_template(config, tokenizer, train_data, valid_data):
"""
Processes training and validation data using a specified chat template.
Args:
config (object): Configuration object containing settings and parameters.
tokenizer (object): Tokenizer object used for tokenizing the data.
train_data (Dataset): Training dataset to be processed.
valid_data (Dataset): Validation dataset to be processed.
Returns:
tuple: A tuple containing the processed training and validation datasets.
Notes:
- If `config.chat_template` is one of ("chatml", "zephyr", "tokenizer"), the chat template will be applied.
- Logs information about the application of the chat template.
- For ORPO/DPO, the `prompt` will be extracted from chosen messages.
- If `config.valid_split` is not None, the validation data will also be processed.
"""
if config.chat_template in ("chatml", "zephyr", "tokenizer"):
logger.info("Applying chat template")
logger.info("For ORPO/DPO, `prompt` will be extracted from chosen messages")
train_data = train_data.map(
apply_chat_template,
fn_kwargs={
"tokenizer": tokenizer,
"config": config,
},
)
if config.valid_split is not None:
valid_data = valid_data.map(
apply_chat_template,
fn_kwargs={
"tokenizer": tokenizer,
"config": config,
},
)
return train_data, valid_data
def configure_logging_steps(config, train_data, valid_data):
"""
Configures the logging steps for training based on the provided configuration and data.
Parameters:
config (object): Configuration object containing training parameters, including `logging_steps`, `valid_split`, and `batch_size`.
train_data (iterable): Training dataset.
valid_data (iterable): Validation dataset.
Returns:
int: The number of logging steps to be used during training.
Notes:
- If `config.logging_steps` is set to -1, the function calculates logging steps based on 20% of the length of the validation data (if `valid_split` is provided) or the training data.
- The calculated logging steps are constrained to be between 1 and 25.
- If `config.logging_steps` is not -1, the function uses the provided value.
"""
logger.info("configuring logging steps")
if config.logging_steps == -1:
if config.valid_split is not None:
logging_steps = int(0.2 * len(valid_data) / config.batch_size)
else:
logging_steps = int(0.2 * len(train_data) / config.batch_size)
if logging_steps == 0:
logging_steps = 1
if logging_steps > 25:
logging_steps = 25
config.logging_steps = logging_steps
else:
logging_steps = config.logging_steps
logger.info(f"Logging steps: {logging_steps}")
return logging_steps
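# A minimal sketch of the auto-setting branch; config is a stand-in with only
# the attributes this function reads, and only len() is used on the dataset.
def _example_configure_logging_steps():
    from types import SimpleNamespace

    config = SimpleNamespace(logging_steps=-1, valid_split=None, batch_size=8)
    train_data = list(range(400))
    # int(0.2 * 400 / 8) = 10, which already lies within the [1, 25] clamp.
    return configure_logging_steps(config, train_data, valid_data=None)  # -> 10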
def configure_training_args(config, logging_steps):
"""
Configures the training arguments for a language model based on the provided configuration.
Args:
config (object): Configuration object containing various training parameters.
logging_steps (int): Number of steps between logging events.
Returns:
dict: A dictionary containing the configured training arguments.
The configuration object `config` should have the following attributes:
- project_name (str): The name of the project, used as the output directory.
- batch_size (int): Batch size for both training and evaluation.
- lr (float): Learning rate.
- epochs (int): Number of training epochs.
- eval_strategy (str): Evaluation strategy, e.g., "steps" or "epoch".
- valid_split (float or None): Validation split ratio. If None, evaluation is disabled.
- save_total_limit (int): Maximum number of checkpoints to save.
- gradient_accumulation (int): Number of gradient accumulation steps.
- log (str): Logging destination, e.g., "tensorboard".
- auto_find_batch_size (bool): Whether to automatically find the optimal batch size.
- scheduler (str): Learning rate scheduler type.
- optimizer (str): Optimizer type.
- warmup_ratio (float): Warmup ratio for learning rate scheduling.
- weight_decay (float): Weight decay for the optimizer.
- max_grad_norm (float): Maximum gradient norm for clipping.
- disable_gradient_checkpointing (bool): Whether to disable gradient checkpointing.
- peft (bool): Whether to use Parameter-Efficient Fine-Tuning (PEFT).
- quantization (str): Quantization type, e.g., "int4" or "int8".
- mixed_precision (str): Mixed precision type, e.g., "fp16" or "bf16".
The function also sets additional training arguments based on the provided configuration,
such as enabling gradient checkpointing and mixed precision training.
"""
logger.info("configuring training args")
training_args = dict(
output_dir=config.project_name,
per_device_train_batch_size=config.batch_size,
per_device_eval_batch_size=config.batch_size,
learning_rate=config.lr,
num_train_epochs=config.epochs,
eval_strategy=config.eval_strategy if config.valid_split is not None else "no",
logging_steps=logging_steps,
save_total_limit=config.save_total_limit,
save_strategy=config.eval_strategy if config.valid_split is not None else "no",
gradient_accumulation_steps=config.gradient_accumulation,
report_to=config.log,
auto_find_batch_size=config.auto_find_batch_size,
lr_scheduler_type=config.scheduler,
optim=config.optimizer,
warmup_ratio=config.warmup_ratio,
weight_decay=config.weight_decay,
max_grad_norm=config.max_grad_norm,
push_to_hub=False,
load_best_model_at_end=True if config.valid_split is not None else False,
ddp_find_unused_parameters=False,
gradient_checkpointing=not config.disable_gradient_checkpointing,
remove_unused_columns=False,
)
if not config.disable_gradient_checkpointing:
if config.peft and config.quantization in ("int4", "int8"):
training_args["gradient_checkpointing_kwargs"] = {"use_reentrant": True}
else:
training_args["gradient_checkpointing_kwargs"] = {"use_reentrant": False}
if config.mixed_precision == "fp16":
training_args["fp16"] = True
if config.mixed_precision == "bf16":
training_args["bf16"] = True
return training_args
def configure_block_size(config, tokenizer):
"""
Configures the block size for the given configuration and tokenizer.
This function sets the `block_size` attribute in the `config` object based on the `tokenizer`'s maximum model length.
If `config.block_size` is -1, it is set to None. If `config.block_size` is None, it defaults to the tokenizer's
`model_max_length` but not exceeding 1024. If `config.block_size` is specified and exceeds the tokenizer's
`model_max_length`, a warning is logged and the block size is set to the tokenizer's `model_max_length`.
Args:
config (object): Configuration object that contains the `block_size` attribute.
tokenizer (object): Tokenizer object that contains the `model_max_length` attribute.
Returns:
object: The updated configuration object with the `block_size` attribute set.
"""
if config.block_size == -1:
config.block_size = None
if config.block_size is None:
block_size = tokenizer.model_max_length
if block_size > 1024:
logger.warning(
"The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
" of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
" override this default with `--block_size xxx`."
)
block_size = 1024
else:
if config.block_size > tokenizer.model_max_length:
logger.warning(
f"The block_size passed ({config.block_size}) is larger than the maximum length for the model"
f"({tokenizer.model_max_length}). Using block_size={tokenizer.model_max_length}."
)
block_size = min(config.block_size, tokenizer.model_max_length)
config.block_size = block_size
logger.info(f"Using block size {block_size}")
return config
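# A minimal sketch of the capping behaviour; both objects are stand-ins with
# only the attributes this function reads.
def _example_configure_block_size():
    from types import SimpleNamespace

    tokenizer = SimpleNamespace(model_max_length=2048)
    config = configure_block_size(SimpleNamespace(block_size=None), tokenizer)
    return config.block_size  # -> 1024: unset block sizes default to at most 1024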
def get_callbacks(config):
"""
Generate a list of callback instances based on the provided configuration.
This function creates a list of callback instances that are used during the training process.
It includes default callbacks for logging and training start, and conditionally adds callbacks
for saving and loading PEFT models based on the configuration and environment settings.
Args:
config (object): Configuration object containing training settings and parameters.
Returns:
list: A list of callback instances to be used during training.
"""
is_deepspeed_enabled = os.environ.get("ACCELERATE_USE_DEEPSPEED", "False").lower() == "true"
callbacks = [UploadLogs(config=config), LossLoggingCallback(), TrainStartCallback()]
if config.peft and not is_deepspeed_enabled:
callbacks.append(SavePeftModelCallback)
if config.valid_split is not None:
callbacks.append(LoadBestPeftModelCallback)
return callbacks
def get_model(config, tokenizer):
"""
Loads and configures a language model based on the provided configuration and tokenizer.
Args:
config (Namespace): Configuration object containing model parameters and settings.
- model (str): The model name or path.
- token (str): Token for accessing the model.
- unsloth (bool): Flag to determine if unsloth is used.
- trainer (str): Type of trainer to use.
- target_modules (str): Target modules for unsloth.
- peft (bool): Flag to determine if PEFT (Parameter-Efficient Fine-Tuning) is used.
- quantization (str): Quantization type, either "int4" or "int8".
- mixed_precision (str): Mixed precision type, either "fp16" or "bf16".
- block_size (int): Maximum sequence length.
- lora_r (int): LoRA rank.
- lora_alpha (int): LoRA alpha.
- lora_dropout (float): LoRA dropout rate.
- seed (int): Random seed.
- disable_gradient_checkpointing (bool): Flag to disable gradient checkpointing.
- use_flash_attention_2 (bool): Flag to use flash attention 2.
tokenizer (PreTrainedTokenizer): Tokenizer to use with the model.
Returns:
PreTrainedModel: The configured language model.
Raises:
ImportError: If unsloth is not available when required.
"""
model_config = AutoConfig.from_pretrained(
config.model,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
)
model_type = model_config.model_type
unsloth_target_modules = None
    can_use_unsloth = False
    if config.unsloth and is_unsloth_available() and config.trainer in ("default", "sft"):
        can_use_unsloth = True
    if model_type in ("llama", "mistral", "gemma", "qwen2") and config.unsloth:
        if config.target_modules.strip().lower() == "all-linear":
            unsloth_target_modules = ["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"]
        else:
            unsloth_target_modules = get_target_modules(config)
    else:
        can_use_unsloth = False
    logger.info(f"Can use unsloth: {can_use_unsloth}")
    if can_use_unsloth:
from unsloth import FastLanguageModel
load_in_4bit = False
load_in_8bit = False
if config.peft and config.quantization == "int4":
load_in_4bit = True
elif config.peft and config.quantization == "int8":
load_in_8bit = True
dtype = None
if config.mixed_precision == "fp16":
dtype = torch.float16
elif config.mixed_precision == "bf16":
dtype = torch.bfloat16
model, _ = FastLanguageModel.from_pretrained(
model_name=config.model,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
load_in_4bit=load_in_4bit,
load_in_8bit=load_in_8bit,
max_seq_length=config.block_size,
dtype=dtype,
)
if config.peft:
model = FastLanguageModel.get_peft_model(
model,
r=config.lora_r,
target_modules=unsloth_target_modules,
lora_alpha=config.lora_alpha,
lora_dropout=config.lora_dropout,
bias="none",
use_gradient_checkpointing="unsloth",
random_state=config.seed,
max_seq_length=config.block_size,
use_rslora=False,
loftq_config=None,
)
return model
else:
logger.warning("Unsloth not available, continuing without it...")
logger.info("loading model config...")
model_config = AutoConfig.from_pretrained(
config.model,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
use_cache=config.disable_gradient_checkpointing,
)
logger.info("loading model...")
if config.peft:
if config.quantization == "int4":
bnb_config = BitsAndBytesConfig(
load_in_4bit=True,
bnb_4bit_quant_type="nf4",
bnb_4bit_compute_dtype=torch.float16,
bnb_4bit_use_double_quant=False,
)
elif config.quantization == "int8":
bnb_config = BitsAndBytesConfig(load_in_8bit=True)
else:
bnb_config = None
model = AutoModelForCausalLM.from_pretrained(
config.model,
config=model_config,
token=config.token,
quantization_config=bnb_config,
trust_remote_code=ALLOW_REMOTE_CODE,
use_flash_attention_2=config.use_flash_attention_2,
)
else:
model = AutoModelForCausalLM.from_pretrained(
config.model,
config=model_config,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
use_flash_attention_2=config.use_flash_attention_2,
)
logger.info(f"model dtype: {model.dtype}")
model.resize_token_embeddings(len(tokenizer))
if config.trainer != "default":
return model
if config.peft:
logger.info("preparing peft model...")
if config.quantization is not None:
gradient_checkpointing_kwargs = {}
if not config.disable_gradient_checkpointing:
if config.quantization in ("int4", "int8"):
gradient_checkpointing_kwargs = {"use_reentrant": True}
else:
gradient_checkpointing_kwargs = {"use_reentrant": False}
model = prepare_model_for_kbit_training(
model,
use_gradient_checkpointing=not config.disable_gradient_checkpointing,
gradient_checkpointing_kwargs=gradient_checkpointing_kwargs,
)
else:
model.enable_input_require_grads()
peft_config = LoraConfig(
r=config.lora_r,
lora_alpha=config.lora_alpha,
lora_dropout=config.lora_dropout,
bias="none",
task_type="CAUSAL_LM",
target_modules=get_target_modules(config),
)
model = get_peft_model(model, peft_config)
return model
| 6 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/clm/train_clm_orpo.py | from peft import LoraConfig
from transformers.trainer_callback import PrinterCallback
from trl import ORPOConfig, ORPOTrainer
from autotrain import logger
from autotrain.trainers.clm import utils
from autotrain.trainers.clm.params import LLMTrainingParams
def train(config):
logger.info("Starting ORPO training...")
if isinstance(config, dict):
config = LLMTrainingParams(**config)
train_data, valid_data = utils.process_input_data(config)
tokenizer = utils.get_tokenizer(config)
train_data, valid_data = utils.process_data_with_chat_template(config, tokenizer, train_data, valid_data)
logging_steps = utils.configure_logging_steps(config, train_data, valid_data)
training_args = utils.configure_training_args(config, logging_steps)
config = utils.configure_block_size(config, tokenizer)
training_args["max_length"] = config.block_size
training_args["max_prompt_length"] = config.max_prompt_length
training_args["max_completion_length"] = config.max_completion_length
args = ORPOConfig(**training_args)
model = utils.get_model(config, tokenizer)
if config.peft:
peft_config = LoraConfig(
r=config.lora_r,
lora_alpha=config.lora_alpha,
lora_dropout=config.lora_dropout,
bias="none",
task_type="CAUSAL_LM",
target_modules=utils.get_target_modules(config),
)
logger.info("creating trainer")
callbacks = utils.get_callbacks(config)
trainer_args = dict(
args=args,
model=model,
callbacks=callbacks,
)
trainer = ORPOTrainer(
**trainer_args,
train_dataset=train_data,
eval_dataset=valid_data if config.valid_split is not None else None,
processing_class=tokenizer,
peft_config=peft_config if config.peft else None,
)
trainer.remove_callback(PrinterCallback)
trainer.train()
utils.post_training_steps(config, trainer)
| 7 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/clm/train_clm_reward.py | from functools import partial
import torch
from peft import LoraConfig
from transformers import AutoConfig, AutoModelForSequenceClassification, BitsAndBytesConfig
from transformers.trainer_callback import PrinterCallback
from trl import RewardConfig, RewardTrainer
from autotrain import logger
from autotrain.trainers.clm import utils
from autotrain.trainers.clm.params import LLMTrainingParams
from autotrain.trainers.common import ALLOW_REMOTE_CODE
def train(config):
logger.info("Starting Reward training...")
if isinstance(config, dict):
config = LLMTrainingParams(**config)
train_data, valid_data = utils.process_input_data(config)
tokenizer = utils.get_tokenizer(config)
train_data, valid_data = utils.process_data_with_chat_template(config, tokenizer, train_data, valid_data)
logging_steps = utils.configure_logging_steps(config, train_data, valid_data)
training_args = utils.configure_training_args(config, logging_steps)
config = utils.configure_block_size(config, tokenizer)
training_args["max_length"] = config.block_size
args = RewardConfig(**training_args)
logger.info("loading model config...")
model_config = AutoConfig.from_pretrained(
config.model,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
use_cache=config.disable_gradient_checkpointing,
)
model_config.num_labels = 1
model_config.pad_token_id = tokenizer.pad_token_id
model_config.pad_token = tokenizer.pad_token
logger.info("loading model...")
if config.peft:
if config.quantization == "int4":
bnb_config = BitsAndBytesConfig(
load_in_4bit=True,
bnb_4bit_quant_type="nf4",
bnb_4bit_compute_dtype=torch.float16,
bnb_4bit_use_double_quant=False,
)
elif config.quantization == "int8":
bnb_config = BitsAndBytesConfig(load_in_8bit=True)
else:
bnb_config = None
model = AutoModelForSequenceClassification.from_pretrained(
config.model,
config=model_config,
token=config.token,
quantization_config=bnb_config,
trust_remote_code=ALLOW_REMOTE_CODE,
use_flash_attention_2=config.use_flash_attention_2,
)
else:
model = AutoModelForSequenceClassification.from_pretrained(
config.model,
config=model_config,
token=config.token,
trust_remote_code=ALLOW_REMOTE_CODE,
use_flash_attention_2=config.use_flash_attention_2,
)
logger.info(f"model dtype: {model.dtype}")
model.resize_token_embeddings(len(tokenizer))
if config.peft:
peft_config = LoraConfig(
r=config.lora_r,
lora_alpha=config.lora_alpha,
lora_dropout=config.lora_dropout,
bias="none",
task_type="SEQ_CLS",
target_modules=utils.get_target_modules(config),
)
reward_proc = partial(utils.preprocess_reward, tokenizer=tokenizer)
train_data = train_data.map(
reward_proc,
batched=True,
num_proc=4,
desc="Running tokenizer on train dataset",
)
train_data = train_data.filter(
lambda x: len(x["input_ids_chosen"]) <= config.block_size and len(x["input_ids_rejected"]) <= config.block_size
)
if config.valid_split is not None:
valid_data = valid_data.map(
reward_proc,
batched=True,
num_proc=4,
desc="Running tokenizer on validation dataset",
)
valid_data = valid_data.filter(
lambda x: len(x["input_ids_chosen"]) <= config.block_size
and len(x["input_ids_rejected"]) <= config.block_size
)
logger.info("creating trainer")
callbacks = utils.get_callbacks(config)
trainer_args = dict(
args=args,
model=model,
callbacks=callbacks,
)
trainer = RewardTrainer(
**trainer_args,
train_dataset=train_data,
eval_dataset=valid_data if config.valid_split is not None else None,
peft_config=peft_config if config.peft else None,
processing_class=tokenizer,
)
trainer.remove_callback(PrinterCallback)
trainer.train()
utils.post_training_steps(config, trainer)
| 8 |
0 | hf_public_repos/autotrain-advanced/src/autotrain/trainers | hf_public_repos/autotrain-advanced/src/autotrain/trainers/clm/__main__.py | import argparse
import json
from autotrain.trainers.clm.params import LLMTrainingParams
from autotrain.trainers.common import monitor
def parse_args():
# get training_config.json from the end user
parser = argparse.ArgumentParser()
parser.add_argument("--training_config", type=str, required=True)
return parser.parse_args()
@monitor
def train(config):
if isinstance(config, dict):
config = LLMTrainingParams(**config)
if config.trainer == "default":
from autotrain.trainers.clm.train_clm_default import train as train_default
train_default(config)
elif config.trainer == "sft":
from autotrain.trainers.clm.train_clm_sft import train as train_sft
train_sft(config)
elif config.trainer == "reward":
from autotrain.trainers.clm.train_clm_reward import train as train_reward
train_reward(config)
elif config.trainer == "dpo":
from autotrain.trainers.clm.train_clm_dpo import train as train_dpo
train_dpo(config)
elif config.trainer == "orpo":
from autotrain.trainers.clm.train_clm_orpo import train as train_orpo
train_orpo(config)
else:
raise ValueError(f"trainer `{config.trainer}` not supported")
if __name__ == "__main__":
_args = parse_args()
training_config = json.load(open(_args.training_config))
_config = LLMTrainingParams(**training_config)
train(_config)
| 9 |
0 | hf_public_repos | hf_public_repos/blog/simple-considerations.md | ---
title: Simple considerations for simple people building fancy neural networks
thumbnail: /blog/assets/13_simple-considerations/henry-co-3coKbdfnAFg-unsplash.jpg
authors:
- user: VictorSanh
---

<span class="text-gray-500 text-xs">Photo by [Henry & Co.](https://unsplash.com/@hngstrm?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/s/photos/builder?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)</span>
# 🚧 Simple considerations for simple people building fancy neural networks
As machine learning continues penetrating all aspects of the industry, neural networks have never been so hyped. For instance, models like GPT-3 have been all over social media in the past few weeks and continue to make headlines outside of tech news outlets with fear-mongering titles.

<div class="text-center text-xs text-gray-500">
<a class="text-gray-500" href="https://www.theguardian.com/commentisfree/2020/sep/08/robot-wrote-this-article-gpt-3">An article</a> from The Guardian
</div>
At the same time, deep learning frameworks, tools, and specialized libraries democratize machine learning research by making state-of-the-art research easier to use than ever. It is quite common to see these almost-magical/plug-and-play 5 lines of code that promise (near) state-of-the-art results. Working at [Hugging Face](https://huggingface.co/) 🤗, I admit that I am partially guilty of that. 😅 It can give an inexperienced user the misleading impression that neural networks are now a mature technology while in fact, the field is in constant development.
In reality, **building and training neural networks can often be an extremely frustrating experience**:
* It is sometimes hard to understand if your performance comes from a bug in your model/code or is simply limited by your model’s expressiveness.
* You can make tons of tiny mistakes at every step of the process without realizing at first, and your model will still train and give a decent performance.
**In this post, I will try to highlight a few steps of my mental process when it comes to building and debugging neural networks.** By “debugging”, I mean making sure you align what you have built and what you have in mind. I will also point out things you can look at when you are not sure what your next step should be by listing the typical questions I ask myself.
_A lot of these thoughts stem from my experience doing research in natural language processing but most of these principles can be applied to other fields of machine learning._
## 1. 🙈 Start by putting machine learning aside
It might sound counter-intuitive but the very first step of building a neural network is to **put aside machine learning and simply focus on your data**. Look at the examples, their labels, the diversity of the vocabulary if you are working with text, their length distribution, etc. You should dive into the data to get a first sense of the raw product you are working with and focus on extracting general patterns that a model might be able to catch. Hopefully, by looking at a few hundred examples, you will be able to identify high-level patterns. A few standard questions you can ask yourself:
* Are the labels balanced?
* Are there gold-labels that you do not agree with?
* How were the data obtained? What are the possible sources of noise in this process?
* Are there any preprocessing steps that seem natural (tokenization, URL or hashtag removing, etc.)?
* How diverse are the examples?
* What rule-based algorithm would perform decently on this problem?
It is important to get a **high-level feeling (qualitative) of your dataset along with a fine-grained analysis (quantitative)**. If you are working with a public dataset, someone else might have already dived into the data and reported their analysis (it is quite common in Kaggle competitions, for instance), so you should absolutely have a look at these!
## 2. 📚 Continue as if you just started machine learning
Once you have a deep and broad understanding of your data, I always recommend **putting yourself in the shoes of your old self when you just started machine learning** and were watching introduction classes from Andrew Ng on Coursera. **Start as simple as possible to get a sense of the difficulty of your task and how well standard baselines would perform.** For instance, if you work with text, standard baselines for binary text classification can include a logistic regression trained on top of word2vec or fastText embeddings. With the current tools, running these baselines is as easy (if not easier) as running BERT, which can arguably be considered one of the standard tools for many natural language processing problems. If other baselines are available, run (or implement) some of them. It will help you get even more familiar with the data.
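To make this concrete, here is a minimal baseline sketch in scikit-learn; it substitutes simple TF-IDF features for the word2vec/fastText embeddings mentioned above to stay self-contained, and `texts` and `labels` are placeholders for your own data:

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import f1_score
from sklearn.model_selection import train_test_split

# `texts` (list of strings) and `labels` (list of 0/1 ints) are your own data.
X_train, X_test, y_train, y_test = train_test_split(texts, labels, test_size=0.2)

# Fit TF-IDF features on the training split, then a simple linear classifier.
vectorizer = TfidfVectorizer(max_features=50_000)
classifier = LogisticRegression(max_iter=1000)
classifier.fit(vectorizer.fit_transform(X_train), y_train)

predictions = classifier.predict(vectorizer.transform(X_test))
print("F1:", f1_score(y_test, predictions))
```

A baseline like this takes minutes to run and gives you a concrete number that any fancier model has to beat.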
As developers, it is easy to feel good when building something fancy, but it is sometimes hard to rationally justify it if it beats easy baselines by only a few points, so it is essential to make sure you have reasonable points of comparison:
* How would a random predictor perform (especially in classification problems)? Datasets can be unbalanced…
* What would the loss look like for a random predictor?
* What is (are) the best metric(s) to measure progress on my task?
* What are the limits of this metric? If it’s perfect, what can I conclude? What can’t I conclude?
* What is missing in “simple approaches” to reach a perfect score?
* Are there architectures in my neural network toolbox that would be good to model the inductive bias of the data?
## 3. 🦸♀️ Don’t be afraid to look under the hood of these 5-liners templates
Next, you can start building your model based on the insights and understanding you acquired previously. As mentioned earlier, implementing neural networks can quickly become quite tricky: there are many moving parts that work together (the optimizer, the model, the input processing pipeline, etc.), and many small things can go wrong when implementing these parts and connecting them to each other. **The challenge lies in the fact that you can make these mistakes, train a model without it ever crashing, and still get a decent performance…**
Yet, it is a good habit when you think you have finished implementing to **overfit a small batch of examples** (16 for instance). If your implementation is (nearly) correct, your model will be able to overfit and remember these examples by displaying a 0-loss (make sure you remove any form of regularization such as weight decay). If not, it is highly possible that you did something wrong in your implementation. In some rare cases, it means that your model is not expressive enough or lacks capacity. Again, **start with a small-scale model** (fewer layers for instance): you are looking to debug your model so you want a quick feedback loop, not a high performance.
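A rough sketch of this sanity check, assuming a standard PyTorch setup where `model`, `batch`, and `loss_fn` are your own objects:

```python
import torch

def overfit_one_batch(model, batch, loss_fn, steps=500, lr=1e-3):
    """Sanity check: a correct implementation should drive the loss on one
    fixed batch close to zero. Disable regularization (weight decay, dropout)
    before running this; `model`, `batch`, and `loss_fn` are placeholders."""
    model.train()
    inputs, labels = batch
    optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=0.0)
    for step in range(steps):
        optimizer.zero_grad()
        loss = loss_fn(model(inputs), labels)
        loss.backward()
        optimizer.step()
        if step % 50 == 0:
            print(f"step {step}: loss = {loss.item():.4f}")
    return loss.item()
```

If the final loss stays far from zero, suspect your implementation before suspecting your model's capacity.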
> Pro-tip: in my experience working with pre-trained language models, freezing the embedding modules to their pre-trained values doesn’t much affect the fine-tuning task performance, while considerably speeding up the training.
Some common errors include:
* Wrong indexing… (these are really the worst 😅). Make sure you are gathering tensors along the correct dimensions for instance…
* You forgot to call `model.eval()` before evaluation (in PyTorch) or `model.zero_grad()` to clean the gradients
* Something went wrong in the pre-processing of the inputs
* The loss got wrong arguments (for instance passing probabilities when it expects logits)
* Initialization doesn’t break the symmetry (usually happens when you initialize a whole matrix with a single constant value)
* Some parameters are never called during the forward pass (and thus receive no gradients)
* The learning rate is taking funky values like 0 all the time
* Your inputs are being truncated in a suboptimal way
> Pro-tip: when you work with language, have a serious **look at the outputs of the tokenizers**. I can’t count the number of lost hours I spent trying to reproduce results (and sometimes my own old results) because something went wrong with the tokenization.🤦♂️
Another useful tool is **deep-diving into the training dynamics** by plotting (in TensorBoard, for instance) the evolution of multiple scalars through training. At the bare minimum, you should look at the dynamics of your loss(es), the parameters, and their gradients.
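A minimal sketch of this kind of logging with TensorBoard's `SummaryWriter`; `loss`, `model`, and `global_step` come from your own training loop:

```python
from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter(log_dir="runs/debug")

# Inside the training loop, after loss.backward():
writer.add_scalar("train/loss", loss.item(), global_step)

# Logging every parameter at every step is expensive; do it every N steps.
if global_step % 100 == 0:
    for name, param in model.named_parameters():
        writer.add_scalar(f"params/{name}.norm", param.detach().norm().item(), global_step)
        if param.grad is not None:
            writer.add_scalar(f"grads/{name}.norm", param.grad.norm().item(), global_step)
```

Gradient norms that are always zero (dead parameters) or exploding (unstable learning rate) are often visible here long before they show up in your metrics.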
As the loss decreases, you also want to look at the model’s predictions: either by evaluating on your development set or, my personal favorite, **printing a couple of model outputs**. For instance, if you are training a machine translation model, it is quite satisfying to see the generations become more and more convincing through the training. You want to be especially careful about overfitting: your training loss continues to decrease while your evaluation loss is aiming at the stars.💫
## 4. 👀 Tune but don’t tune blindly
Once you have everything up and running, you might want to tune your hyperparameters to find the best configuration for your setup. I generally stick with a random grid search as it turns out to be fairly effective in practice.
> Some people report successes using fancy hyperparameter tuning methods such as Bayesian optimization, but in my experience, random search over a reasonable manually defined grid is still a tough-to-beat baseline.
Most importantly, there is no point in launching 1000 runs with different hyperparameters (or architecture tweaks like activation functions): **compare a couple of runs with different hyperparameters to get an idea of which hyperparameters have the highest impact**, but in general, it is delusional to expect to get your biggest performance jumps by simply tuning a few values. For instance, if your best performing model is trained with a learning rate of 4e2, there is probably something more fundamental happening inside your neural network, and you want to identify and understand this behavior so that you can re-use this knowledge outside of your current specific context.
On average, experts use fewer resources to find better solutions.
To conclude, a piece of general advice that has helped me become better at building neural networks is to **favor (as much as possible) a deep understanding of each component of your neural network instead of blindly (not to say magically) tweaking the architecture**. Keep it simple and avoid small tweaks that you can’t reasonably justify even after trying really hard. Obviously, there is the right balance to find between a “trial-and-error” and an “analysis” approach, but a lot of these intuitions feel more natural as you accumulate practical experience. **You too are training your internal model.** 🤯
A few related pointers to complete your reading:
* [Reproducibility (in ML) as a vehicle for engineering best practices](https://docs.google.com/presentation/d/1yHLPvPhUs2KGI5ZWo0sU-PKU3GimAk3iTsI38Z-B5Gw/edit#slide=id.p) from Joel Grus
* [Checklist for debugging neural networks](https://towardsdatascience.com/checklist-for-debugging-neural-networks-d8b2a9434f21) from Cecelia Shao
* [How to unit test machine learning code](https://medium.com/@keeper6928/how-to-unit-test-machine-learning-code-57cf6fd81765) from Chase Roberts
* [A recipe for Training Neural Networks](http://karpathy.github.io/2019/04/25/recipe/) from Andrej Karpathy
| 0 |
0 | hf_public_repos | hf_public_repos/blog/sql-console.md | ---
title: "Introducing the SQL Console on Datasets"
thumbnail: /blog/assets/sql_console/thumbnail.png
authors:
- user: cfahlgren1
---
Dataset use has been exploding, and Hugging Face has become the default home for many datasets. Each month, as the number of datasets uploaded to the Hub increases, so does the need to query, filter, and discover them.

_Datasets created on Hugging Face Hub each month_
We are very excited to announce that you can now run SQL queries on your datasets directly in the Hugging Face Hub!
## Introducing the SQL Console for Datasets
On every dataset you should see a new **SQL Console** badge. With just one click you can open a SQL Console to query that dataset.
<figure class="image flex flex-col items-center text-center m-0 w-full">
<video
alt="SQL Console Demo"
autoplay loop autobuffer muted playsinline
>
<source src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/sql_console/Magpie-Ultra-Demo-SQL-Console.mp4" type="video/mp4">
</video>
<figcaption class="text-center text-sm italic">Querying the Magpie-Ultra dataset for excellent, high quality reasoning instructions.</figcaption>
</figure>
All the work is done in the browser and the console comes with a few neat features:
- **100% Local**: The SQL Console is powered by [DuckDB](https://duckdb.org/) WASM, so you can query your dataset without any dependencies.
- **Full DuckDB Syntax**: DuckDB has [full SQL](https://duckdb.org/docs/sql/introduction.html) syntax support, along with many built-in functions for regex, lists, JSON, embeddings, and more. You'll find DuckDB syntax to be very similar to PostgreSQL.
- **Export Results**: You can export the results of your query to parquet.
- **Shareable**: You can share your query results of public datasets with a link.
## How it works
### Parquet Conversion
Most datasets on Hugging Face are stored in Parquet, a columnar data format that is optimized for performance and storage efficiency. The Dataset Viewer on Hugging Face and the SQL Console load the data directly from the dataset's Parquet files. And if the dataset is in another format, the first 5GB is auto-converted to Parquet. You can find more information about the Parquet conversion process in the [Dataset Viewer Parquet API documentation](https://huggingface.co/docs/dataset-viewer/en/parquet).
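If you want to inspect these auto-converted Parquet files programmatically, the Dataset Viewer exposes them through the public API documented above. A short sketch (the dataset name is just an example):

```python
import requests

# List the auto-converted Parquet files for a dataset via the Dataset Viewer
# API; the dataset name here is an arbitrary example.
response = requests.get(
    "https://datasets-server.huggingface.co/parquet",
    params={"dataset": "yahma/alpaca-cleaned"},
    timeout=30,
)
for parquet_file in response.json()["parquet_files"]:
    print(parquet_file["config"], parquet_file["split"], parquet_file["url"])
```

The returned URLs can then be read directly by any Parquet-aware engine.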
Using the Parquet files, the SQL Console creates views for you to query based on your dataset splits and configs.
### DuckDB WASM 🦆
[DuckDB WASM](https://duckdb.org/docs/api/wasm/overview.html) is the engine that powers the SQL Console. It is an in-process database engine that runs on Web Assembly in the browser. No server or backend needed.
By running solely in the browser, it gives the user the utmost flexibility to query data as they please without any dependencies. It also makes it really simple to share reproducible results with a simple link.
You may be wondering, _"Will it work for big datasets?"_ and the answer is, "Yes!".
Here's a query of the [OpenCo7/UpVoteWeb](https://huggingface.co/datasets/OpenCo7/UpVoteWeb) dataset which has `12.6M` rows in the Parquet conversion.

You can see we received results for a simple filter query in under 3 seconds.
While queries will take longer depending on the size of the dataset and query complexity, you will be surprised by how much you can do with the SQL Console.
As with any technology, there are limitations.
- The SQL Console will work for a lot of queries. However, the memory limit is ~3GB, so it is possible to run out of memory and not be able to process the query (_Tip: try to use filters to reduce the amount of data you are querying along with `LIMIT`_).
- While DuckDB WASM is very powerful, it doesn't have full feature parity with DuckDB. For example, DuckDB WASM does not yet support the [`hf://` protocol to query datasets](https://github.com/duckdb/duckdb-wasm/discussions/1858).
### Example: Converting a dataset from Alpaca to conversations
Now that we've introduced the SQL Console, let's explore a practical example. When fine-tuning a Large Language Model (LLM), you often need to work with different data formats. One particularly popular format is the conversational format, where each row represents a multi-turn dialogue between a user and the model. The SQL Console can help us transform data into this format efficiently. Let's see how we can convert an Alpaca dataset to a conversational format using SQL.
Typically, developers would tackle this task with a Python pre-processing step, but we can show how to use the SQL Console to achieve the same in less than 30 seconds.
<iframe
src="https://huggingface.co/datasets/yahma/alpaca-cleaned/embed/viewer/default/train?sql=--+Convert+Alpaca+format+to+Conversation+format%0AWITH+%0Asource_view+AS+%28%0A++SELECT+*+FROM+train++--+Change+%27train%27+to+your+desired+view+name+here%0A%29%0ASELECT+%0A++%5B%0A++++struct_pack%28%0A++++++%22from%22+%3A%3D+%27user%27%2C%0A++++++%22value%22+%3A%3D+CASE+%0A+++++++++++++++++++WHEN+input+IS+NOT+NULL+AND+input+%21%3D+%27%27+%0A+++++++++++++++++++THEN+instruction+%7C%7C+%27%5Cn%5Cn%27+%7C%7C+input%0A+++++++++++++++++++ELSE+instruction%0A+++++++++++++++++END%0A++++%29%2C%0A++++struct_pack%28%0A++++++%22from%22+%3A%3D+%27assistant%27%2C%0A++++++%22value%22+%3A%3D+output%0A++++%29%0A++%5D+AS+conversation%0AFROM+source_view%0AWHERE+instruction+IS+NOT+NULL+%0AAND+output+IS+NOT+NULL%3B"
frameborder="0"
width="100%"
height="800px"
></iframe>
In the dataset above, click on the **SQL Console** badge to open the SQL Console. You should see the query below automatically populated.
When you are ready, click the **Run Query** button to execute the query.
### SQL
```sql
-- Convert Alpaca format to Conversation format
WITH
source_view AS (
SELECT * FROM train -- Change 'train' to your desired view name here
)
SELECT
[
struct_pack(
"from" := 'user',
"value" := CASE
WHEN input IS NOT NULL AND input != ''
THEN instruction || '\n\n' || input
ELSE instruction
END
),
struct_pack(
"from" := 'assistant',
"value" := output
)
] AS conversation
FROM source_view
WHERE instruction IS NOT NULL
AND output IS NOT NULL;
```
In the query we use the `struct_pack` function to create a new STRUCT row for each conversation.
DuckDB has great documentation on the `STRUCT` [Data Type](https://duckdb.org/docs/sql/data_types/struct.html) and [Functions](https://duckdb.org/docs/sql/functions/struct.html). You'll find many datasets contain columns with JSON data. DuckDB provides functions to easily parse and query these columns.
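The same SQL also runs outside the browser with DuckDB's Python client, which can be handy for larger jobs. In the sketch below, the Parquet path and the `metadata` JSON column are hypothetical placeholders:

```python
import duckdb

# Hypothetical example: aggregate over a JSON `metadata` column stored in
# local Parquet files. `json_extract_string` pulls one field out of the JSON.
con = duckdb.connect()
result = con.sql("""
    SELECT
        json_extract_string(metadata, '$.source') AS source,
        count(*) AS n
    FROM 'data/*.parquet'
    GROUP BY source
    ORDER BY n DESC
""").df()
print(result)
```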

Once we have the results, we can download them as a Parquet file. You can see what the final output looks like below.
<iframe
src="https://huggingface.co/datasets/cfahlgren1/alpaca-conversational/embed/viewer/default/train"
frameborder="0"
width="100%"
height="560px"
></iframe>
**Try it out!**
As another example, you can try a SQL Console query for [SkunkworksAI/reasoning-0.01](https://huggingface.co/datasets/SkunkworksAI/reasoning-0.01?sql_console=true&sql=--+Find+instructions+with+more+than+10+reasoning+steps%0Aselect+*+from+train%0Awhere+len%28reasoning_chains%29+%3E+10%0Alimit+100&sql_row=43) to see instructions with more than 10 reasoning steps.
## SQL Snippets
DuckDB has a ton of use cases that we are still exploring. We created a [SQL Snippets](https://huggingface.co/spaces/cfahlgren1/sql-snippets) space to showcase what you can do with the SQL Console.
Here are some really interesting use cases we have found:
- [Filtering a function calling dataset for a specific function with regex](https://x.com/qlhoest/status/1835687940376207651)
- [Finding the most popular base models from open-llm-leaderboard](https://x.com/polinaeterna/status/1834601082862842270)
- [Converting an alpaca dataset to a conversational format](https://x.com/calebfahlgren/status/1834674871688704144)
- [Performing similarity search with embeddings](https://x.com/andrejanysa/status/1834253758152269903)
- [Filtering 50k+ rows from a dataset for the highest quality, reasoning instructions](https://x.com/calebfahlgren/status/1835703284943749301)
Remember, it's one click to download your SQL results as a Parquet file and use for your dataset!
We would love to hear what you think of the SQL Console and if you have any feedback, please comment in this [post!](https://huggingface.co/posts/cfahlgren1/845769119345136)
## Resources
- [DuckDB WASM](https://duckdb.org/docs/api/wasm/overview.html)
- [DuckDB Syntax](https://duckdb.org/docs/sql/introduction.html)
- [DuckDB WASM Paper](https://www.vldb.org/pvldb/vol15/p3574-kohn.pdf)
- [Intro to Parquet Format](https://huggingface.co/blog/cfahlgren1/intro-to-parquet-format)
- [Hugging Face + DuckDB](https://huggingface.co/docs/hub/en/datasets-duckdb)
- [SQL Snippets Space](https://huggingface.co/spaces/cfahlgren1/sql-snippets) | 1 |
0 | hf_public_repos | hf_public_repos/blog/watermarking.md | ---
title: "AI Watermarking 101: Tools and Techniques"
thumbnail: /blog/assets/watermarking/thumbnail.png
authors:
- user: sasha
- user: yjernite
- user: derek-thomas
- user: EmilyWitko
- user: Ezi
- user: JJoe206
- user: reach-vb
- user: BrigitteTousi
- user: meg
---
# AI Watermarking 101: Tools and Techniques
In recent months, we've seen multiple news stories involving ‘deepfakes’, or AI-generated content: from [images of Taylor Swift](https://www.npr.org/2024/01/26/1227091070/deepfakes-taylor-swift-images-regulation) to [videos of Tom Hanks](https://www.theguardian.com/film/2023/oct/02/tom-hanks-dental-ad-ai-version-fake) and [recordings of US President Joe Biden](https://www.bbc.com/news/world-us-canada-68064247). Whether they are selling products, manipulating images of people without their consent, supporting phishing for private information, or creating misinformation materials intended to mislead voters, deepfakes are increasingly being shared on social media platforms. This enables them to be quickly propagated and have a wider reach and therefore, the potential to cause long-lasting damage.
In this blog post, we will describe approaches to carry out watermarking of AI-generated content, discuss their pros and cons, and present some of the tools available on the Hugging Face Hub for adding/detecting watermarks.
## What is watermarking and how does it work?
<figure class="image text-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/watermarking/fig1.png" alt="Dall-E 2 watermark">
<figcaption> Figure 1: OpenAI’s Dall-E 2 included a visible watermark composed of 5 blocks of different colors in the bottom right corner. Source: instagram.com/dailydall.e </figcaption>
</figure>
Watermarking is a method designed to mark content in order to convey additional information, such as authenticity. Watermarks in AI-generated content can range from fully visible (Figure 1) to invisible (Figure 2). In AI specifically, watermarking involves adding patterns to digital content (such as images), and conveying information regarding the provenance of the content; these patterns can then be recognized either by humans or algorithmically.
<figure class="image text-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/watermarking/fig2.png" alt="Invisible watermark">
<figcaption> Figure 2: Companies such as Imatag and Truepic have developed ways to embed imperceptible watermarks in AI-generated images. </figcaption>
</figure>
There are two primary methods for watermarking AI-generated content: the first occurs during content creation, which requires access to the model itself but can also be [more robust given that it is automatically embedded as part of the generation process](https://huggingface.co/blog/imatag-vch/stable-signature-bzh). The second method, which is implemented after the content is produced, can also be applied even to content from closed-source and proprietary models, with the caveat that it may not be applicable to all types of content (e.g., text).
### Data Poisoning and Signing Techniques
In addition to watermarking, several related techniques have a role to play in limiting non-consensual image manipulation. Some imperceptibly alter images you share online so that AI algorithms don’t process them well. Even though people can see the images normally, AI algorithms can’t access comparable content, and as a result, can't create new images. Some tools that imperceptibly alter images include Glaze and Photoguard. Other tools work to “poison” images so that they break the assumptions inherent in AI algorithm training, making it impossible for AI systems to learn what people look like based on the images shared online – this makes it harder for these systems to generate fake images of people. These tools include [Nightshade](https://nightshade.cs.uchicago.edu/whatis.html) and [Fawkes](http://sandlab.cs.uchicago.edu/fawkes/).
Maintaining content authenticity and reliability is also possible by utilizing "signing” techniques that link content to metadata about their provenance, such as the work of [Truepic](https://truepic.com/), which embeds [metadata following the C2PA standard](https://huggingface.co/spaces/Truepic/ai-content-credentials). Image signing can help understand where images come from. While metadata can be edited, systems such as Truepic help get around this limitation by 1) Providing certification to ensure that the validity of the metadata can be verified and 2) Integrating with watermarking techniques to make it harder to remove the information.
### Open vs Closed Watermarks
There are pros and cons of providing different levels of access to both watermarkers and detectors for the general public. Openness helps stimulate innovation, as developers can iterate on key ideas and create better and better systems. However, this must be balanced against malicious use. With open code in an AI pipeline calling a watermarker, it is trivial to remove the watermarking step. Even if that aspect of the pipeline is closed, then if the watermark is known and the watermarking code is open, malicious actors may read the code to figure out how to edit generated content in a way where the watermarking doesn't work. If access to a detector is also available, it's possible to continue editing something synthetic until the detector returns low-confidence, undoing what the watermark provides. There are hybrid open-closed approaches that directly address these issues. For instance, the Truepic watermarking code is closed, but they provide a public JavaScript library that can verify Content Credentials. The IMATAG code to call a watermarker during generation is open, but [the actual watermarker and the detector are private](https://huggingface.co/blog/imatag-vch/stable-signature-bzh).
## Watermarking Different Types of Data
While watermarking is an important tool across modalities (audio, images, text, etc.), each modality brings with it unique challenges and considerations. So, too, does the intent of the watermark: whether to prevent the usage of *training data* for training models, to protect content from being manipulated, to mark the *output* of models, or to *detect* AI-generated data. In the current section, we explore different modalities of data, the challenges they present for watermarking, and the open-source tools that exist on the Hugging Face Hub to carry out different types of watermarking.
### Watermarking Images
Probably the best known type of watermarking (both for content created by humans or produced by AI) is carried out on images. There have been different approaches proposed to tag training data to impact the outputs of models trained on it: the best-known method for this kind of ‘image cloaking’ approach is [“Nightshade”](https://arxiv.org/abs/2310.13828), which carries out tiny changes to images that are imperceptible to the human eye but that impact the quality of models trained on poisoned data. There are similar image cloaking tools available on the Hub - for instance, [Fawkes](https://huggingface.co/spaces/derek-thomas/fawkes), developed by the same lab that developed Nightshade, specifically targets images of people with the goal of thwarting facial recognition systems. Similarly, there’s also [Photoguard](https://huggingface.co/spaces/hadisalman/photoguard), which aims to guard images against manipulation using generative AI tools, e.g., for the creation of deepfakes based on them.
In terms of watermarking output images, there are two complementary approaches available on the Hub: [IMATAG](https://huggingface.co/spaces/imatag/stable-signature-bzh) (see Fig 2), which carries out watermarking during the generation of content by leveraging modified versions of popular models such as [Stable Diffusion XL Turbo](https://huggingface.co/stabilityai/sdxl-turbo), and [Truepic](https://huggingface.co/spaces/Truepic/watermarked-content-credentials), which adds invisible content credentials after an image has been generated.
TruePic also embeds C2PA content credentials into the images, which enables the storage of metadata regarding image provenance and generation in the image itself. Both the IMATAG and TruePic Spaces also allow for the detection of images watermarked by their systems. Both of these detection tools work with their respective approaches (i.e., they are approach-specific). There is an existing general [deepfake detection Space](https://huggingface.co/spaces/Wvolf/CNN_Deepfake_Image_Detection) on the Hub, but in our experience, we found that these solutions have variable performance depending on the quality of the image and the model used.
### Watermarking Text
While watermarking AI-generated images can seem more intuitive – given the strongly visual nature of this content – text is a whole different story… How do you add watermarks to written words and numbers (tokens)? Well, the current approaches for watermarking rely on promoting sub-vocabularies based on the previous text. Let's dive into what this would look like for LLM-generated text.
During the generation process, an LLM outputs [a list of logits for the next token](https://huggingface.co/docs/transformers/main_classes/output#transformers.modeling_outputs.CausalLMOutput.logits) before it carries out sampling or greedy decoding. Based on the previous generated text, most approaches split all candidate tokens into 2 groups – call them “red” and “green”. The “red” tokens will be restricted, and the “green” group will be promoted. This can happen by disallowing the red group tokens altogether (Hard Watermark), or by increasing the probability of the green group (Soft Watermark). The more we change the original probabilities, the higher our watermarking strength. [WaterBench](https://huggingface.co/papers/2311.07138) has created a benchmark dataset to facilitate comparison of performance across watermarking algorithms while controlling the watermarking strength for apples-to-apples comparisons.
Detection works by determining what “color” each token is, and then calculating the probability that the input text comes from the model in question. It’s worth noting that shorter texts have much lower confidence, since there are fewer tokens to examine.
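As a concrete illustration, here is a rough sketch of the "soft" logit-biasing step described above, seeding the red/green split from the previous token; all names and default values are illustrative rather than taken from any particular implementation:

```python
import torch

def soft_watermark_logits(logits, prev_token_id, gamma=0.5, delta=2.0):
    """Bias next-token logits toward a pseudo-random "green" vocabulary subset.

    logits: 1-D tensor of next-token logits from the model.
    prev_token_id: previous token id, used to seed the red/green split.
    gamma: fraction of the vocabulary placed in the green list.
    delta: logit bias added to green tokens (the watermark strength).
    """
    vocab_size = logits.shape[-1]
    rng = torch.Generator().manual_seed(int(prev_token_id))
    green_ids = torch.randperm(vocab_size, generator=rng)[: int(gamma * vocab_size)]
    biased = logits.clone()
    biased[green_ids] += delta  # soft watermark: promote the green group
    return biased
```

Detection then replays the same red/green split at each position and counts how many tokens of the input text landed in the green group, which is statistically unlikely for unwatermarked text.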
<figure class="image text-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/watermarking/fig3.png" alt="Text watermark">
<figcaption> Figure 3: The <a href="https://huggingface.co/spaces/tomg-group-umd/lm-watermarking">Watermark for LLMs Space</a> showing red and green tokens on synthetic text that represent the probability that these are AI-generated. </figcaption>
</figure>
There are a couple of ways you can easily implement watermarking for LLMs on the Hugging Face Hub. [The Watermark for LLMs Space](https://huggingface.co/spaces/tomg-group-umd/lm-watermarking) (see Fig. 3) demonstrates this, using an [LLM watermarking approach](https://huggingface.co/papers/2301.10226) on models such as OPT and Flan-T5. For production level workloads, you can use our [Text Generation Inference toolkit](https://huggingface.co/docs/text-generation-inference/index), which implements the same watermarking algorithm and sets the [corresponding parameters](https://huggingface.co/docs/text-generation-inference/main/en/basic_tutorials/launcher#watermarkgamma) and can be used with any of the latest models!
Similar to universal watermarking of AI-generated images, it is yet to be proven whether universally watermarking text is possible. Approaches such as [GLTR](http://gltr.io/) are meant to be robust for any accessible language model (given that they rely upon comparing the logits of generated text to those of different models). Detecting whether a given text was generated using a language model without having access to that model (either because it’s closed-source or because you don’t know which model was used to generate the text) is currently impossible.
As we discussed above, detection methods for generated text require a large amount of text to be reliable. Even then, detectors can have high false positive rates, incorrectly labeling text written by people as synthetic. Indeed, [OpenAI removed their in-house detection tool](https://www.pcmag.com/news/openai-quietly-shuts-down-ai-text-detection-tool-over-inaccuracies) in 2023 given its low accuracy, which came with [unintended consequences](https://www.rollingstone.com/culture/culture-features/texas-am-chatgpt-ai-professor-flunks-students-false-claims-1234736601/) when it was used by teachers to gauge whether the assignments submitted by their students were generated using ChatGPT or not.
### Watermarking Audio
The data extracted from a person's voice (voiceprint) is often used as a biometric security authentication mechanism to identify an individual. While generally paired with other security factors such as a PIN or password, a breach of this biometric data still presents a risk and can be used to gain access to, e.g., bank accounts, given that many banks use voice recognition technologies to verify clients over the phone. As voice becomes easier to replicate with AI, we must also improve the techniques to validate the authenticity of voice audio. Watermarking audio content is similar to watermarking images in the sense that there is a multidimensional output space that can be used to inject metadata regarding provenance. In the case of audio, the watermarking is usually carried out on frequencies that are imperceptible to human ears (below ~20 or above ~20,000 Hz), which can then be detected using AI-driven approaches.
Given the high-stakes nature of audio output, watermarking audio content is an active area of research, and multiple approaches (e.g., [WaveFuzz](https://arxiv.org/abs/2203.13497), [Venomave](https://ieeexplore.ieee.org/abstract/document/10136135)) have been proposed over the last few years.
[AudioSeal](https://github.com/facebookresearch/audioseal) is a method for localized speech watermarking, with state-of-the-art detector speed that does not compromise watermarking robustness. It jointly trains a generator that embeds a watermark in the audio and a detector that detects the watermarked fragments in longer audios, even in the presence of editing. AudioSeal achieves state-of-the-art detection performance of both natural and synthetic speech at the sample level (1/16k second resolution), produces only limited alteration of signal quality, and is robust to many types of audio editing.
<figure class="image text-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/watermarking/fig4.png" alt="AudioSeal watermarking approach.">
<figcaption> Figure 4 - Pipeline of AudioSeal watermarking and detection. Source: <a href="https://github.com/facebookresearch/audioseal">GitHub (AudioSeal)</a> </figcaption>
</figure>
AudioSeal was also used to release [SeamlessExpressive](https://huggingface.co/spaces/facebook/seamless-expressive) and [SeamlessStreaming](https://huggingface.co/spaces/facebook/seamless-streaming) demos with mechanisms for safety.
## Conclusion
Disinformation, accusations of producing synthetic content when it is real, and instances of inappropriate representations of people without their consent can be difficult and time-consuming to address; much of the damage is done before corrections and clarifications can be made. As such, as part of our mission to democratize good machine learning, we at Hugging Face believe that having mechanisms to identify AI-generated content quickly and systematically is important. AI watermarking is not foolproof, but it can be a powerful tool in the fight against malicious and misleading uses of AI.
## Relevant press stories
- [It Doesn't End With Taylor Swift: How to Protect Against AI Deepfakes and Sexual Harassment | PopSugar](https://www.popsugar.com/tech/ai-deepfakes-taylor-swift-sexual-harassment-49334216) (@meg)
- [Three ways we can fight deepfake porn | MIT Technology Review ](https://www.technologyreview.com/2024/01/29/1087325/three-ways-we-can-fight-deepfake-porn-taylors-version/) (@sasha)
- [Gun violence killed them. Now, their voices will lobby Congress to do more using AI | NPR](https://www.npr.org/2024/02/14/1231264701/gun-violence-parkland-anniversary-ai-generated-voices-congress) (@irenesolaiman)
- [Google DeepMind has launched a watermarking tool for AI-generated images | MIT Technology Review](https://www.technologyreview.com/2023/08/29/1078620/google-deepmind-has-launched-a-watermarking-tool-for-ai-generated-images/) (@sasha)
- [Invisible AI watermarks won’t stop bad actors. But they are a ‘really big deal’ for good ones | VentureBeat](https://venturebeat.com/ai/invisible-ai-watermarks-wont-stop-bad-actors-but-they-are-a-really-big-deal-for-good-ones/) (@meg)
- [A watermark for chatbots can expose text written by an AI | MIT Technology Review](https://www.technologyreview.com/2023/01/27/1067338/a-watermark-for-chatbots-can-spot-text-written-by-an-ai/) (@irenesolaiman)
- [Hugging Face empowers users with deepfake detection tools | Mashable](https://mashable.com/article/hugging-face-empowers-users-ai-deepfake-detetection-tools) (@meg)
| 2 |
0 | hf_public_repos | hf_public_repos/blog/community-update.md | ---
title: Introducing Pull Requests and Discussions 🥳
thumbnail: /blog/assets/76_community_update/thumbnail.png
---
# Introducing Pull Requests and Discussions 🥳

We are thrilled to announce the release of our latest collaborative features: pull requests and discussions on the Hugging Face Hub!
Pull requests and discussions are available today under the [community tab](https://huggingface.co/gpt2/discussions) for all repository types: models, datasets, and Spaces. Any member of the community can create and participate in discussions and pull requests, facilitating collaborations not only within teams, but also with everyone else in the community!
It's the biggest update ever done to the Hub, and we can't wait to see the community members start collaborating with it 🤩.
The new "Community" tab also aligns with proposals in ethical ML throughout the years. Feedback and iterations have a central place in the development of ethical machine learning software. We really believe having it in the community's toolset will unlock new kinds of positive patterns in ML, collaborations, and progress.
Some example use cases for discussions and pull requests:
- Propose suggestions in model cards to improve disclosures of ethical biases.
- Let users flag concerning generations of a given Space demo.
- Provide a venue through which model and dataset authors can have a direct discussion with community members.
- Allow others to improve your repositories! For example, users might want to provide TensorFlow weights!
## Discussions

[Discussions](https://huggingface.co/gpt2/discussions?type=discussion) allow community members to ask and answer questions as well as share their ideas and suggestions directly with the repository owners and the community. Anyone can create and participate in discussions in the community tab of a repository.
## Pull requests

[Pull requests](https://huggingface.co/gpt2/discussions?type=pull_request) allow community members to open, comment on, merge, or close pull requests directly from the website. The easiest way to open a pull request is to use the "Collaborate" button in the "Files and versions" tab. It will let you make single file contributions very easily.
Under the hood, our pull requests do not use forks and branches, but instead custom "branches" called `refs` that are stored directly on the source repo. This approach avoids the need to create a fork for each new version of the model/dataset.
## How is this different from other git hosts
At a high level, we aim to build a simpler version of other git hosts' (like GitHub's) PRs and Issues:
- no forks are involved: contributors push to a special `ref` branch directly on the source repo
- no hard distinction between issues and PRs: they are essentially the same so we display them in the same lists
- streamlined for ML (i.e. models/datasets/Spaces repos), not arbitrary repos
## What's next
Of course, it's only the beginning. We will listen to the community feedback to add new features and improve the community tab in the future. If you have any feedback, you can [join the discussion here](https://huggingface.co/spaces/huggingface/HuggingDiscussions/discussions/1). Today is the best time to join your first discussion and open a PR! 🤗
| 3 |
0 | hf_public_repos | hf_public_repos/blog/websight.md | ---
title: "Unlocking the conversion of Web Screenshots into HTML Code with the WebSight Dataset"
thumbnail: /blog/assets/websight/thumbnail.png
authors:
- user: HugoLaurencon
- user: Leyo
- user: VictorSanh
---
# From screenshots to HTML code: Introducing the WebSight dataset
In the world of web development, turning designs into functional websites usually involves a lot of coding and careful testing. What if we could simplify this process, making it possible to convert web designs into working websites more easily and quickly? WebSight is a new dataset that aims at building AI systems capable of transforming screenshots to HTML code.
## The challenge
Turning a website design or screenshot into HTML code usually needs an experienced developer. But what if this could be more efficient? Motivated by this question, we investigated how vision-language models (VLMs) could be used in web development to create low-code solutions that improve efficiency.
Today, the main challenge towards that goal is the lack of high-quality datasets tailored for this task. WebSight aims to fill that gap.
## WebSight: A large synthetic dataset of screenshot/HTML code pairs
In January 2024, we introduced [WebSight-v0.1](https://huggingface.co/datasets/HuggingFaceM4/WebSight), a synthetic dataset that consists of 823,000 pairs of HTML codes and their corresponding screenshots. This dataset is designed to train AI models to process and translate visual web designs into functional HTML code. By focusing on synthetic data, we've managed to bypass the noise and complexity often found in real-world HTML, allowing AI models to learn efficiently.
Following our initial release and building on top of the community’s feedback, we've updated our dataset to WebSight-v0.2, introducing significant improvements. These enhancements feature using real images in the screenshots, and switching to Tailwind CSS (instead of traditional CSS). We further scaled the dataset to 2 million examples.
<figure align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/websight/websight_examples_2.jpg" width="800" alt="Examples of web pages included in WebSight"/>
<figcaption>Examples of web pages included in WebSight.</figcaption>
</figure>
## Sightseer: A model fine-tuned on WebSight
Leveraging the WebSight dataset, we’ve fine-tuned our forthcoming foundation vision-language model to obtain Sightseer, a model capable of converting webpage screenshots into functional HTML code. Sightseer additionally demonstrates the capability to incorporate images into the generated HTML that closely resemble those in the original screenshots.
<figure align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/websight/main_generation_2.jpg" width="1000" alt="Comparison of an original web page (input) on the left, and the rendering of the code generated by our model, Sightseer, (output) on the right."/>
<figcaption>Comparison of an original web page (input) on the left, and the rendering of the code generated by our model, Sightseer, (output) on the right.</figcaption>
</figure>
## Towards more powerful tools unlocked by visual language models
By iterating over WebSight, our goal is to build more capable AI systems that simplify the process of turning UI designs into functional code. This could reduce iteration time for developers by rapidly transforming a paper UI sketch into functional code, while making the process more accessible for non-developers. This is one of the many real applications of visual language models. By open-sourcing WebSight, we encourage the community to work with us toward building more powerful tools for UI development.
## Resources
- Dataset: https://huggingface.co/datasets/HuggingFaceM4/WebSight
- Technical report: http://arxiv.org/abs/2403.09029
- Google colab: https://colab.research.google.com/drive/1LdamGKR2oacrDk-kYwz_Wfc1-RBUdzcO?usp=sharing
| 4 |
0 | hf_public_repos | hf_public_repos/blog/accelerate-deepspeed.md | ---
title: "Accelerate Large Model Training using DeepSpeed"
thumbnail: /blog/assets/83_accelerate_deepspeed/deepspeed-thumbnail.png
authors:
- user: smangrul
- user: sgugger
---
# Accelerate Large Model Training using DeepSpeed
In this post we will look at how we can leverage the **[Accelerate](https://github.com/huggingface/accelerate)** library for training large models, which enables users to leverage the ZeRO features of **[DeepSpeed](https://www.deepspeed.ai)**.
## Motivation 🤗
**Tired of Out of Memory (OOM) errors while trying to train large models? We've got you covered. Large models are very performant [1] but difficult to train with the available hardware. To get the most out of the available hardware for training large models, one can leverage Data Parallelism using ZeRO - Zero Redundancy Optimizer [2]**.
Below is a short description of Data Parallelism using ZeRO, with a diagram from this [blog post](https://www.microsoft.com/en-us/research/blog/zero-deepspeed-new-system-optimizations-enable-training-models-with-over-100-billion-parameters/)

(Source: [link](https://www.microsoft.com/en-us/research/blog/zero-deepspeed-new-system-optimizations-enable-training-models-with-over-100-billion-parameters/))
a. **Stage 1**: Shards optimizer states across data parallel workers/GPUs
b. **Stage 2**: Shards optimizer states + gradients across data parallel workers/GPUs
c. **Stage 3**: Shards optimizer states + gradients + model parameters across data parallel workers/GPUs
d. **Optimizer Offload**: Offloads the gradients + optimizer states to CPU/Disk, building on top of ZeRO Stage 2
e. **Param Offload**: Offloads the model parameters to CPU/Disk, building on top of ZeRO Stage 3
In this blog post we will look at how to leverage Data Parallelism using ZeRO with Accelerate. **[DeepSpeed](https://github.com/microsoft/deepspeed)**, **[FairScale](https://github.com/facebookresearch/fairscale/)** and **[PyTorch FullyShardedDataParallel (FSDP)](https://pytorch.org/blog/introducing-pytorch-fully-sharded-data-parallel-api/)** have implemented the core ideas of the ZeRO paper. These have already been integrated into 🤗 `transformers` Trainer and 🤗 `accelerate`, accompanied by great blog posts [Fit More and Train Faster With ZeRO via DeepSpeed and FairScale](https://huggingface.co/blog/zero-deepspeed-fairscale) [4] and [Accelerate Large Model Training using PyTorch Fully Sharded Data Parallel](https://huggingface.co/blog/pytorch-fsdp) [5]. We defer the explanation of what goes on behind the scenes to those blog posts and mainly focus on leveraging DeepSpeed ZeRO using Accelerate.
## Accelerate 🚀: Leverage DeepSpeed ZeRO without any code changes
**Hardware setup**: 2X24GB NVIDIA Titan RTX GPUs. 60GB RAM.
We will look at the task of finetuning encoder-only model for text-classification. We will use pretrained `microsoft/deberta-v2-xlarge-mnli` (900M params) for finetuning on MRPC GLUE dataset.
The code is available here [run_cls_no_trainer.py](https://github.com/pacman100/accelerate-deepspeed-test/blob/main/src/modeling/run_cls_no_trainer.py). It is similar to the official text-classification example [here](https://github.com/huggingface/transformers/blob/main/examples/pytorch/text-classification/run_glue_no_trainer.py) with the addition of logic to measure train and eval time. Let's compare performance between Distributed Data Parallel (DDP) and DeepSpeed ZeRO Stage-2 in a Multi-GPU Setup.
To enable DeepSpeed ZeRO Stage-2 without any code changes, please run `accelerate config` and leverage the [Accelerate DeepSpeed Plugin](https://huggingface.co/docs/accelerate/deepspeed#accelerate-deepspeed-plugin).
**ZeRO Stage-2 DeepSpeed Plugin Example**
```yaml
compute_environment: LOCAL_MACHINE
deepspeed_config:
  gradient_accumulation_steps: 1
  gradient_clipping: 1.0
  offload_optimizer_device: none
  offload_param_device: none
  zero3_init_flag: false
  zero_stage: 2
distributed_type: DEEPSPEED
fsdp_config: {}
machine_rank: 0
main_process_ip: null
main_process_port: null
main_training_function: main
mixed_precision: fp16
num_machines: 1
num_processes: 2
use_cpu: false
```
Now, run the following command for training:
```bash
accelerate launch run_cls_no_trainer.py \
--model_name_or_path "microsoft/deberta-v2-xlarge-mnli" \
--task_name "mrpc" \
--ignore_mismatched_sizes \
--max_length 128 \
--per_device_train_batch_size 40 \
--learning_rate 2e-5 \
--num_train_epochs 3 \
--output_dir "/tmp/mrpc/deepspeed_stage2/" \
--with_tracking \
--report_to "wandb" \
```
In our Single-Node Multi-GPU setup, the maximum batch size that DDP supports without an OOM error is 8. In contrast, DeepSpeed ZeRO Stage-2 enables a batch size of 40 without running into OOM errors. Therefore, DeepSpeed enables fitting **5X** more data per GPU compared to DDP. Below is a snapshot of the plots from the wandb [run](https://wandb.ai/smangrul/DDP_vs_DeepSpeed_cls_task?workspace=user-smangrul) along with a benchmarking table comparing DDP vs DeepSpeed.

---
| Method | Batch Size Max | Train time per epoch (seconds) | Eval time per epoch (seconds) | F1 score | Accuracy |
| --- | --- | --- | --- | --- | --- |
| DDP (Distributed Data Parallel) | 8 | 103.57 | 2.04 | 0.931 | 0.904 |
| DeepSpeed ZeRO Stage 2 | **40** | **28.98** | **1.79** | **0.936** | **0.912** |
Table 1: Benchmarking DeepSpeed ZeRO Stage-2 on DeBERTa-XL (900M) model
---
With this bigger batch size, we observe a ~**3.5X** speedup in total training time without any drop in performance metrics, all without changing any code. Yay! 🤗
To be able to tweak more options, you will need to use a DeepSpeed config file and minimal code changes. Let's see how to do this.
## Accelerate 🚀: Leverage a DeepSpeed Config file to tweak more options
First, we will look at the task of fine-tuning a sequence-to-sequence model for training our own chatbot. Specifically, we will fine-tune `facebook/blenderbot-400M-distill` on the [smangrul/MuDoConv](https://huggingface.co/datasets/smangrul/MuDoConv) (Multi-Domain Conversation) dataset. The dataset contains conversations from 10 different data sources covering personas, grounding in specific emotional contexts, goal-oriented tasks (e.g., restaurant reservation) and general Wikipedia topics (e.g., cricket).
The code is available here: [run_seq2seq_no_trainer.py](https://github.com/pacman100/accelerate-deepspeed-test/blob/main/src/modeling/run_seq2seq_no_trainer.py). The current practice to effectively measure the `Engagingness` and `Humanness` of chatbots is via human evaluations, which are expensive [6]. As such, for this example the metric being tracked is the BLEU score (which isn't ideal but is the conventional metric for such tasks). You can adapt the code to train larger T5 models if you have access to GPUs that support `bfloat16` precision; otherwise you will run into `NaN` loss values. We will run a quick benchmark on `10000` train samples and `1000` eval samples, as we are interested in DeepSpeed vs DDP.
We will leverage the DeepSpeed ZeRO Stage-2 config [zero2_config_accelerate.json](https://github.com/pacman100/accelerate-deepspeed-test/blob/main/src/modeling/configs/zero2_config_accelerate.json) (given below) for training. For detailed information on the various config features, please refer to the [DeepSpeed](https://www.deepspeed.ai) documentation.
```json
{
  "fp16": {
    "enabled": true,
    "loss_scale": 0,
    "loss_scale_window": 1000,
    "initial_scale_power": 15,
    "hysteresis": 2,
    "min_loss_scale": 1
  },
  "optimizer": {
    "type": "AdamW",
    "params": {
      "lr": "auto",
      "weight_decay": "auto",
      "torch_adam": true,
      "adam_w_mode": true
    }
  },
  "scheduler": {
    "type": "WarmupDecayLR",
    "params": {
      "warmup_min_lr": "auto",
      "warmup_max_lr": "auto",
      "warmup_num_steps": "auto",
      "total_num_steps": "auto"
    }
  },
  "zero_optimization": {
    "stage": 2,
    "allgather_partitions": true,
    "allgather_bucket_size": 2e8,
    "overlap_comm": true,
    "reduce_scatter": true,
    "reduce_bucket_size": 2e8,
    "contiguous_gradients": true
  },
  "gradient_accumulation_steps": 1,
  "gradient_clipping": "auto",
  "steps_per_print": 2000,
  "train_batch_size": "auto",
  "train_micro_batch_size_per_gpu": "auto",
  "wall_clock_breakdown": false
}
```
To enable DeepSpeed ZeRO Stage-2 with the above config, please run `accelerate config` and provide the config file path when asked. For more details, refer to the 🤗 `accelerate` official documentation for the [DeepSpeed Config File](https://huggingface.co/docs/accelerate/deepspeed#deepspeed-config-file).
**ZeRO Stage-2 DeepSpeed Config File Example**
```yaml
compute_environment: LOCAL_MACHINE
deepspeed_config:
  deepspeed_config_file: /path/to/zero2_config_accelerate.json
  zero3_init_flag: false
distributed_type: DEEPSPEED
fsdp_config: {}
machine_rank: 0
main_process_ip: null
main_process_port: null
main_training_function: main
mixed_precision: fp16
num_machines: 1
num_processes: 2
use_cpu: false
```
Now, run the following command for training:
```bash
accelerate launch run_seq2seq_no_trainer.py \
--dataset_name "smangrul/MuDoConv" \
--max_source_length 128 \
--source_prefix "chatbot: " \
--max_target_length 64 \
--val_max_target_length 64 \
--val_min_target_length 20 \
--n_val_batch_generations 5 \
--n_train 10000 \
--n_val 1000 \
--pad_to_max_length \
--num_beams 10 \
--model_name_or_path "facebook/blenderbot-400M-distill" \
--per_device_train_batch_size 200 \
--per_device_eval_batch_size 100 \
--learning_rate 1e-6 \
--weight_decay 0.0 \
--num_train_epochs 1 \
--gradient_accumulation_steps 1 \
--num_warmup_steps 100 \
--output_dir "/tmp/deepspeed_zero_stage2_accelerate_test" \
--seed 25 \
--logging_steps 100 \
--with_tracking \
--report_to "wandb" \
--report_name "blenderbot_400M_finetuning"
```
When using a DeepSpeed config, if the user has specified `optimizer` and `scheduler` in the config, the user will have to use `accelerate.utils.DummyOptim` and `accelerate.utils.DummyScheduler`. Those are the only minor changes that the user has to make. Below we show an example of the minimal changes required when using a DeepSpeed config:
```diff
- optimizer = torch.optim.Adam(optimizer_grouped_parameters, lr=args.learning_rate)
+ optimizer = accelerate.utils.DummyOptim(optimizer_grouped_parameters, lr=args.learning_rate)
- lr_scheduler = get_scheduler(
- name=args.lr_scheduler_type,
- optimizer=optimizer,
- num_warmup_steps=args.num_warmup_steps,
- num_training_steps=args.max_train_steps,
- )
+ lr_scheduler = accelerate.utils.DummyScheduler(
+ optimizer, total_num_steps=args.max_train_steps, warmup_num_steps=args.num_warmup_steps
+ )
```
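For context, here is a minimal sketch of how these dummy objects typically slot into an Accelerate training setup; `model`, `train_dataloader`, `max_train_steps` and the hyperparameter values are placeholders rather than the exact code of the example script:

```python
from accelerate import Accelerator
from accelerate.utils import DummyOptim, DummyScheduler

accelerator = Accelerator()

# DeepSpeed instantiates the real AdamW / WarmupDecayLR from the JSON config;
# the dummy objects only carry the values Accelerate forwards to DeepSpeed.
optimizer = DummyOptim(model.parameters(), lr=2e-5)
lr_scheduler = DummyScheduler(
    optimizer, total_num_steps=max_train_steps, warmup_num_steps=100
)

model, optimizer, train_dataloader, lr_scheduler = accelerator.prepare(
    model, optimizer, train_dataloader, lr_scheduler
)
```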
---
| Method | Batch Size Max | Eval Size Max | Train time per epoch (seconds) | Eval time per epoch (seconds) |
| --- | --- | --- | --- | --- |
| DDP (Distributed Data Parallel) | 100 | 50 | 27.36 | 48.41 |
| DeepSpeed ZeRO Stage 2 | **200** | **100** | **19.06** | **39.27** |
Table 2: Benchmarking DeepSpeed ZeRO Stage-2 on BlenderBot (400M) model
In our Single-Node Multi-GPU setup, the maximum batch size that DDP supports without an OOM error is 100. In contrast, DeepSpeed ZeRO Stage-2 enables a batch size of 200 without running into OOM errors. Therefore, DeepSpeed enables fitting **2X** more data per GPU compared to DDP. We observe a ~**1.44X** speedup in training and a ~**1.23X** speedup in evaluation, as we are able to fit more data on the same available hardware. As this model is of medium size, the speedup isn't that exciting, but it will improve with bigger models. You can chat with the chatbot trained on the entire data at the 🤗 Space [smangrul/Chat-E](https://huggingface.co/spaces/smangrul/Chat-E). You can give the bot a persona, ground the conversation in a particular emotion, use it in goal-oriented tasks or in a free-flow manner. Below is a fun conversation with the chatbot 💬. You can find snapshots of more conversations using different contexts [here](https://github.com/pacman100/accelerate-deepspeed-test/tree/main/src/chatbot_snapshots).

---
### CPU/Disk Offloading to enable training humongous models that won’t fit the GPU memory
On a single 24GB NVIDIA Titan RTX GPU, one cannot train the GPT-XL model (1.5B parameters), even with a batch size of 1: with mixed-precision Adam, the model states alone require roughly 1.5B × 16 bytes ≈ 24GB, before counting activations. We will look at how we can use DeepSpeed ZeRO Stage-3 with CPU offloading of optimizer states, gradients and parameters to train the GPT-XL model.
We will leverage the DeepSpeed Zero Stage-3 CPU offload config [zero3_offload_config_accelerate.json](https://github.com/pacman100/accelerate-deepspeed-test/blob/main/src/modeling/configs/zero3_offload_config_accelerate.json) (given below) for training. The rest of the process of using the config with 🤗 `accelerate` is similar to the above experiment.
```json
{
  "fp16": {
    "enabled": true,
    "loss_scale": 0,
    "loss_scale_window": 1000,
    "initial_scale_power": 16,
    "hysteresis": 2,
    "min_loss_scale": 1
  },
  "optimizer": {
    "type": "AdamW",
    "params": {
      "lr": "auto",
      "weight_decay": "auto"
    }
  },
  "scheduler": {
    "type": "WarmupDecayLR",
    "params": {
      "warmup_min_lr": "auto",
      "warmup_max_lr": "auto",
      "warmup_num_steps": "auto",
      "total_num_steps": "auto"
    }
  },
  "zero_optimization": {
    "stage": 3,
    "offload_optimizer": {
      "device": "cpu",
      "pin_memory": true
    },
    "offload_param": {
      "device": "cpu",
      "pin_memory": true
    },
    "overlap_comm": true,
    "contiguous_gradients": true,
    "reduce_bucket_size": "auto",
    "stage3_prefetch_bucket_size": "auto",
    "stage3_param_persistence_threshold": "auto",
    "sub_group_size": 1e9,
    "stage3_max_live_parameters": 1e9,
    "stage3_max_reuse_distance": 1e9,
    "stage3_gather_16bit_weights_on_model_save": true
  },
  "gradient_accumulation_steps": 1,
  "gradient_clipping": "auto",
  "steps_per_print": 2000,
  "train_batch_size": "auto",
  "train_micro_batch_size_per_gpu": "auto",
  "wall_clock_breakdown": false
}
```
**ZeRO Stage-3 CPU Offload DeepSpeed Config File Example**
```yaml
compute_environment: LOCAL_MACHINE
deepspeed_config:
  deepspeed_config_file: /path/to/zero3_offload_config_accelerate.json
  zero3_init_flag: true
distributed_type: DEEPSPEED
fsdp_config: {}
machine_rank: 0
main_process_ip: null
main_process_port: null
main_training_function: main
mixed_precision: fp16
num_machines: 1
num_processes: 2
use_cpu: false
```
Now, run the following command for training:
```bash
accelerate launch run_clm_no_trainer.py \
--config_name "gpt2-xl" \
--tokenizer_name "gpt2-xl" \
--dataset_name "wikitext" \
--dataset_config_name "wikitext-2-raw-v1" \
--block_size 128 \
--output_dir "/tmp/clm_deepspeed_stage3_offload__accelerate" \
--learning_rate 5e-4 \
--per_device_train_batch_size 16 \
--per_device_eval_batch_size 1 \
--num_train_epochs 1 \
--with_tracking \
--report_to "wandb"\
```
---
| Method | Batch Size Max | Train time per epoch (seconds) | Notes |
| --- | --- | --- | --- |
| DDP (Distributed Data Parallel) | - | - | OOM Error |
| DeepSpeed ZeRO Stage 3 | **16** | 6608.35 | |
Table 3: Benchmarking DeepSpeed ZeRO Stage-3 CPU Offload on GPT-XL (1.5B) model
---
DDP results in an OOM error even with a batch size of 1. On the other hand, with DeepSpeed ZeRO Stage-3 CPU offload, we can train with a batch size of 16.
Finally, please remember that 🤗 `Accelerate` only integrates DeepSpeed; if you have any problems or questions regarding DeepSpeed usage, please file an issue on the [DeepSpeed GitHub](https://github.com/microsoft/DeepSpeed/issues).
## References
[1] [Train Large, Then Compress: Rethinking Model Size for Efficient Training and Inference of Transformers](http://nlp.cs.berkeley.edu/pubs/Li-Wallace-Shen-Lin-Keutzer-Klein-Gonzalez_2020_Transformers_paper.pdf)
[2] [ZeRO: Memory Optimizations Toward Training Trillion Parameter Models](https://arxiv.org/pdf/1910.02054v3.pdf)
[3] [DeepSpeed: Extreme-scale model training for everyone - Microsoft Research](https://www.microsoft.com/en-us/research/blog/deepspeed-extreme-scale-model-training-for-everyone/)
[4] [Fit More and Train Faster With ZeRO via DeepSpeed and FairScale](https://huggingface.co/blog/zero-deepspeed-fairscale)
[5] [Accelerate Large Model Training using PyTorch Fully Sharded Data Parallel](https://huggingface.co/blog/pytorch-fsdp)
[6] [Recipes for building an open-domain chatbot](https://arxiv.org/pdf/2004.13637.pdf)
---
title: "Optimization story: Bloom inference"
thumbnail: /blog/assets/bloom-inference-pytorch-scripts/thumbnail.png
authors:
- user: Narsil
---
# Optimization story: Bloom inference
This article gives you the behind-the-scenes of how we made an efficient inference server that powers [BLOOM](https://huggingface.co/bigscience/bloom).
We achieved a 5x latency reduction over several weeks (and 50x more throughput). We wanted to share all the struggles and epic wins we went through to achieve such speed improvements.
A lot of different people were involved at many stages, so not everything will be covered here. And please bear with us: some of the content might be outdated or flat-out wrong, because we're still learning how to optimize extremely large models, and lots of new hardware features and content keep coming out regularly.
If your favorite flavor of optimizations is not discussed or is improperly represented, we're sorry; please share it with us, as we're more than happy to try out new stuff and correct our mistakes.
## Creating BLOOM
This goes without saying, but without the large model being accessible in the first place, there would be no real reason to optimize inference for it. This was an incredible effort led by many different people.
To maximize GPU utilization during training, several solutions were explored, and in the end, [Megatron-Deepspeed](https://github.com/bigscience-workshop/Megatron-DeepSpeed) was chosen to train the end model.
This meant that the code as-is wasn't necessarily compatible with the `transformers`
library.
## Porting to transformers
Because of the original training code, we set out to do something we regularly do: port an existing model to `transformers`. The goal was to extract the relevant parts from the training code and implement them within `transformers`.
This effort was tackled by [Younes](/ybelkada).
This is by no means a small effort as it took almost a month and [200 commits](https://github.com/huggingface/transformers/pull/17474/commits) to get there.
There are several things to note that will come back later:
We needed to have smaller models [bigscience/bigscience-small-testing](https://huggingface.co/bigscience/bigscience-small-testing) and [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m).
This is extremely important because they are smaller, so everything is faster when
working with them.
First, you have to abandon all hope of having exactly the same logits at the end, down to the byte. PyTorch versions can change the kernels and introduce subtle differences, and different hardware might yield different results because of different architectures (and you probably don't want to develop on an A100 GPU all the time for cost reasons).
***Getting a good strict test suite is really important for all models***
The best test we found was having a fixed set of prompts. You know the prompt, you know the completion, and generation needs to be deterministic, so use greedy decoding. If two generations are identical, you can basically ignore small logits differences.
Whenever you see a drift, you need to investigate. It could be that your code is not doing what it should, OR that you are actually out of domain for that model and therefore the model is more sensitive to noise. If you have several prompts, and long enough prompts, you're less likely to trigger this for all prompts by accident. The more prompts the better, and the longer the better.
The first model (small-testing) is in `bfloat16` like the big BLOOM, so everything should be very similar, but it wasn't trained much and just doesn't perform well, so its outputs fluctuate a lot. That means we had issues with those generation tests. The second model is more stable, but it was trained and saved in `float16` instead of `bfloat16`, leaving more room for error between the two.
To be perfectly fair, the `bfloat16` -> `float16` conversion seemed to be OK in inference mode (`bfloat16` mostly exists to handle large gradients, which do not exist in inference).
During that step, one important tradeoff was discovered and implemented.
Because BLOOM was trained in a distributed setting, part of the code was doing Tensor Parallelism on a Linear layer, meaning that running the same operation as a single operation on a single GPU gave [different results](https://github.com/huggingface/transformers/blob/main/src/transformers/models/bloom/modeling_bloom.py#L350).
This took a while to pinpoint, and either we went for 100% compliance with a much slower model, or we accepted a small difference in generation in exchange for much faster execution and simpler code. We opted for a configurable flag.
## First inference (PP + Accelerate)
```
Note: Pipeline Parallelism (PP) means in this context that each GPU will own
some layers so each GPU will work on a given chunk of data before handing
it off to the next GPU.
```
Now that we had a workable, clean `transformers` version of the model, we could start working on running it.
Bloom is a 352GB model (176B parameters in bf16); we need at least that much GPU RAM to make it fit. We briefly explored offloading to CPU on smaller machines, but the inference speed was orders of magnitude slower, so we discarded it.
Then we wanted to basically use the [pipeline](https://huggingface.co/docs/transformers/v4.22.2/en/pipeline_tutorial#pipeline-usage). This is dogfooding, as it is what the API uses under the hood all the time.
However, `pipelines` are not distributed-aware (it's not their goal). After briefly discussing options, we ended up using [accelerate](https://github.com/huggingface/accelerate/)'s newly created `device_map="auto"` to manage the sharding of the model. We had to iron out a few bugs and fix the `transformers` code a bit to help `accelerate` do the right job.
It works by splitting the various layers of the transformer and giving part of the model to each GPU. So GPU0 gets to work, then hands it over to GPU1, and so on and so forth.
In the end, with a small HTTP server on top, we could start serving bloom (the big model) !!
## Starting point
But we haven't even started discussing optimizations yet!
We actually have quite a bit of work ahead of us, and this whole process is a house of cards. During optimization we are going to make modifications to the underlying code; being extra sure you're not breaking the model in one way or another is really important, and easier to get wrong than you think.
So we are now at the very first step of optimization, and we need to start measuring performance, and keep measuring it. We need to consider what we care about. For an open inference server supporting many options, we expect users to send many queries with different parameters, and what we care about are:
- The number of users we can serve at the same time (throughput)
- How long it takes for an average user to be served (latency)
We made a testing script in [locust](https://locust.io/) which is exactly this:
```python
from locust import HttpUser, between, task
from random import randrange, random


class QuickstartUser(HttpUser):
    wait_time = between(1, 5)

    @task
    def bloom_small_greedy(self):
        sentence = "Translate to chinese. EN: I like soup. CN: "
        self.client.post(
            "/generate",
            json={
                "inputs": sentence[: randrange(1, len(sentence))],
                "parameters": {"max_new_tokens": 20, "seed": random()},
            },
        )

    @task
    def bloom_small_sampling(self):
        # sampling variant of the same request; the two tasks need distinct
        # names, otherwise the second definition silently overrides the first
        sentence = "Translate to chinese. EN: I like soup. CN: "
        self.client.post(
            "/generate",
            json={
                "inputs": sentence[: randrange(1, len(sentence))],
                "parameters": {
                    "max_new_tokens": 20,
                    "do_sample": True,
                    "top_p": 0.9,
                    "seed": random(),
                },
            },
        )
```
**Note: This is not the best nor the only load testing we used, but it was always the first to be run so that it could compare fairly across approaches. Being the best on this benchmark does NOT mean it is the best solution. Other, more complex scenarios had to be used in addition to actual real-world performance.**
We wanted to observe the ramp-up for various implementations and also make sure that under load the server properly circuit-breaks. Circuit breaking means that the server can answer (fast) that it will not answer your query because too many people are trying to use it at the same time. It's extremely important to avoid the hug of death.
On this benchmark, the initial performance was (on 16x A100 40GB on GCP, which is the machine used throughout):

- Requests/s: 0.3 (throughput)
- Latency: 350ms/token
Those numbers are not that great. Before getting to work let's estimate
the best we can imagine achieving.
The formula for the number of operations is `24Bsh^2 + 4Bs^2h`, where `B` is the batch size, `s` the sequence length, and `h` the hidden dimension.
Let's do the math: we get `17 TFlops` for a single forward pass.
Looking at the [specs](https://www.nvidia.com/en-us/data-center/a100/) of the A100, it claims `312 TFLOPS` for a single card.
That means a single GPU could potentially run at `17 / 312 = 54ms/token`. We're using 16 of those, so roughly `3ms/token` on the overall machine. Take all these numbers with a big grain of salt; it's never possible to reach those numbers,
and real-life performance rarely matches the specs. Also if computation is not your limiting
factor then this is not the lowest you can get. It's just good practice to know how far you are from
your target. In this case, we're 2 orders of magnitude so pretty far. Also, this estimate puts
all the flops at the service of latency which means only a single request can go at a time (it's ok since you're maximizing your machine
so there's not much else to be done, but we can have higher latency and get throughput back through batching much more easily).
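For convenience, here is the same napkin math as a tiny script (the numbers are the illustrative estimates from the paragraph above, not measurements):

```python
flops_per_forward = 17e12  # estimated flops for one forward pass
a100_peak_flops = 312e12   # A100 spec-sheet peak

ms_single_gpu = flops_per_forward / a100_peak_flops * 1000
print(f"{ms_single_gpu:.0f} ms/token on one GPU")       # ~54 ms/token
print(f"{ms_single_gpu / 16:.1f} ms/token on 16 GPUs")  # ~3.4 ms/token
```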
## Exploring many routes
```
Note: Tensor Parallelism (TP) means in this context that each GPU will own
part of the weights, so ALL GPUs are active all the time and do less work.
Usually this comes with a very slight overhead since some work is duplicated,
and more importantly, the GPUs regularly have to communicate their results
to each other to continue the computation
```
Now that we have a good understanding of where we stand, it's time to get to work.
We tried many different things based on the people involved and our various knowledge.
ALL endeavors deserve their own blog post, so I'll just list them, explain the few final learnings, and delve into the details of only what went into the current server.
Moving from Pipeline Parallelism (PP) to Tensor Parallelism (TP) is one big interesting change for latency. Each GPU will own part of the parameters, and all will be working at the same time, so the latency should decrease drastically; the price to pay is the communication overhead, since they regularly need to communicate their results to each other.
It is worth noting that this was a very wide range of approaches, and the intent was deliberately to learn more about each tool and how it could fit into later endeavors.
### Porting the code to JAX/Flax to run on TPUs:
- Expected to make it easier to choose the type of parallelism, so TP should be easier to test. It's one of the perks of JAX's design.
- More constrained on hardware: performance on TPU is likely superior to GPU, but there is less vendor choice for TPUs.
- Con: another port is needed. But it would be welcome anyway in our libs.
Results:
Results:
- Porting was not an easy task, as some conditions and kernels were hard to reproduce correctly enough. Still manageable, though.
- Parallelism was quite easy to get once ported. Kudos to JAX, the claim holds.
- Ray/communicating with TPU workers proved to be a real pain for us. We don't know if it was the tool, the network, or simply our lack of knowledge, but it slowed down experiments and work much more than we anticipated. We would launch an experiment that takes 5mn to run, wait for 5mn, and nothing had happened; 10mn later, still nothing; it turned out some worker was down or not responding, so we had to manually get in, figure out what went on, fix it, restart something, relaunch, and we had just lost half an hour. Repeat that enough times, and lost days add up quickly. Let's emphasize that this is not necessarily a critique of the tools we used, but the subjective experience we had remains.
- No control over compilation. Once we had the thing running, we tried several settings to figure out which suited best the inference we had in mind, and it turned out it was really hard to guess from the settings what the latency/throughput would be. For instance, we had 0.3 RPS at batch_size=1 (so every request/user is on its own) with a latency of 15ms/token (do not compare too much with other numbers in this article, this was on a different machine with a very different profile), which is great, but the overall throughput was not much better than what we had with the old code. So we decided to add batching, and with BS=2 the latency went up 5-fold for only 2 times the throughput... Upon further investigation, it turned out that up to batch_size=16, every batch_size had the same latency profile. So we could have 16x more throughput at a 5x latency cost. Not bad, but looking at the numbers we really would have preferred more fine-grained control.
The numbers we were aiming for stem from the [100ms, 1s, 10s, 1mn](https://www.nngroup.com/articles/response-times-3-important-limits/) rule.
### Using ONNX/TRT or other compiled approaches
- They are supposed to handle most of the optimization work
- Con: parallelism usually needs to be handled manually.
Results:
- It turned out that to be able to trace/jit/export stuff, we needed to rework part of the PyTorch code so that it merged easily with the pure PyTorch approach. Overall, we figured out that we could get most of the optimizations we desired by staying within the PyTorch world, enabling us to keep flexibility without having to make too much coding effort.
Another thing to note: since we're running on GPU and text generation has many forward passes going on, we need the tensors to stay on the GPU; it is sometimes hard to send your tensors to some lib, be given back the result, perform the logits computation (like argmax or sampling), and feed it back again. Putting the loop within the external lib means losing flexibility, just like with JAX, so it was not envisioned in our use case.
### DeepSpeed
- This is the technology that powered training; it seemed only fair to use it for inference.
- Con: it was never used/prepared for inference before.
Results:
- We had really impressive results fast, roughly the same as the last iteration we are currently running.
- We had to invent a way to put a webserver (to deal with concurrency) on top of DeepSpeed, which also has several processes (one for each GPU). There is now an excellent library, [Mii](https://github.com/microsoft/DeepSpeed-MII); it doesn't fit the extremely flexible goals we had in mind, but we would probably start working on top of it today. (The current solution is discussed later.)
- The biggest caveat we encountered with DeepSpeed was the lack of stability. We had issues when running it on CUDA 11.4 where the code was built for 11.6. And the long-standing issue we could never really fix is that there would be regular kernel crashes (CUDA illegal access, dimension mismatches, etc.). We fixed a bunch of these, but we could never quite achieve stability under the stress of our webserver. Despite that, I want to shout out to the Microsoft folks who helped us; we had a really good conversation that improved our understanding of what was happening and gave us real insights for some follow-up work.
- One of the pain points, I feel, is that our team is mostly in Europe while Microsoft is in California, so the collaboration was tricky timewise, and we probably lost a big chunk of time because of it. This has nothing to do with the technical part, but it's good to acknowledge that the organizational part of working together is also really important.
- Another thing to note is that DeepSpeed relies on `transformers` to inject its optimizations, and since we were updating our code pretty much constantly, it made it hard for the DeepSpeed team to keep things working on our `main` branch. We're sorry to have made it hard; I guess this is why it's called bleeding edge.
### Webserver ideas
- Given that we are going to run a free server where users will send long text, short text, want a few tokens or a whole recipe, each with different parameters, something had to be done here.
Results:
- We recoded everything in `Rust` with the excellent bindings [tch-rs](https://github.com/LaurentMazare/tch-rs). Rust was not chosen for performance gains but for much more fine-grained control over parallelism (threads/processes), both on the webserver concurrency side and the PyTorch side. Python is infamously hard to use for low-level details because of the [GIL](https://realpython.com/python-gil/).
- It turned out that most of the pain came from the port, and after that, the experimentation was a breeze. We figured that with enough control over the loops, we could have great performance for everyone, even in the context of a very wide array of requests with different properties. [Code](https://github.com/Narsil/bloomserver) for the curious, but it doesn't come with any support or nice docs.
- It became production for a few weeks because it was more lenient on the parallelism; we could use the GPUs more efficiently (using GPU0 for request 1 while GPU1 was treating request 0), and we went from 0.3 RPS to ~2.5 RPS with the same latency. The optimal case would have been a 16x throughput increase, but the numbers shown here are real workload measurements, so this is not too bad.
### Pure PyTorch
- Purely modify the existing code to make it faster by removing operations like `reshape`, using better-optimized kernels, and so on and so forth.
- Con: we have to code TP ourselves, and we have the constraint that the code still fits our library (mostly).
Results
- Next chapter.
## Final route: PyTorch + TP + 1 custom kernel + torch.jit.script
### Writing more efficient PyTorch
The first item on the list was removing unnecessary operations in the first implementations. Some can be seen by just looking at the code and figuring out obvious flaws:
- Alibi is used in Bloom to add position embeddings, and it was calculated in too many places; we could calculate it only once, and more efficiently. The old code: [link](https://github.com/huggingface/transformers/blob/ca2a55e9dfb245527b5e1c954fec6ffbb7aef07b/src/transformers/models/bloom/modeling_bloom.py#L94-L132). The new code: [link](https://github.com/huggingface/transformers/blob/main/src/transformers/models/bloom/modeling_bloom.py#L86-L127). This is a 10x speedup, and the latest version includes padding too! Since this step is only computed once, the actual speed is not important, but overall reducing the number of operations and tensor creations is a good direction.
Other parts come out more clearly when you start [profiling](https://pytorch.org/tutorials/recipes/recipes/profiler_recipe.html); we used the [tensorboard extension](https://pytorch.org/tutorials/intermediate/tensorboard_profiler_tutorial.html) quite extensively.
It provides this sort of image, which gives insights:
<img src="assets/bloom-inference-optimization/profiler_simple.png">
Attention takes a lot of time. Careful: this is a CPU view, so the long bars don't mean the kernels are slow; they mean the CPU is awaiting the GPU results of the previous step.
<img src="assets/bloom-inference-optimization/profiler.png">
We see many `cat` operations before `baddbmm`.
While removing a lot of the reshapes/transposes, for instance, we figured out that:
- The attention is the hot path (it's expected but always good to verify).
- In the attention, a lot of kernels were actual copies due to the massive amount of reshapes.
- We **could** remove the reshapes by reworking the weights themselves and the past. This is a breaking change, but it did improve performance quite a bit!
### Supporting TP
OK, we have now removed most of the low-hanging fruit. We went roughly from 350ms/token to 300ms/token latency in PP. That's a 15% reduction in latency; it actually provided more than that, but we were not extremely rigorous in our measurements initially, so let's stick to that figure.
Then we went on to provide a TP implementation. It turned out to be much faster than we anticipated: the implementation took a single (experienced) dev half a day.
The result is [here](https://github.com/huggingface/transformers/tree/thomas/dirty_bloom_tp/src/transformers/models/bloom). We were also able to reuse code from other projects which helped.
The latency went directly from 300ms/token to 91ms/token which is a huge improvement in user experience.
A simple 20-token request went from 6s to 2s, moving from a "slow" experience to a slightly delayed one.
Also, the throughput went up a lot, to 10 RPS. The throughput comes from the fact that running a query at batch_size=1 takes the same time as at batch_size=32, so throughput becomes essentially *free* in latency cost at this point.
### Low-hanging fruits
Now that we had a TP implementation, we could start profiling and optimizing again. The shift is significant enough that we had to start from scratch.
The first thing that stood out is that synchronization (ncclAllReduce) starts to become a preponderant part of the load, which is expected: this is the synchronization part, and it **is** taking some time. We never tried to look into and optimize this, as it's already using `nccl`, but there might still be some room for improvement there. We assumed it would be hard to do much better.
The second thing is that the `Gelu` operator was launching many elementwise kernels, and overall it was taking a bigger share of compute than we expected.
We made the change from:
```python
def bloom_gelu_forward(x):
    return x * 0.5 * (1.0 + torch.tanh(0.79788456 * x * (1 + 0.044715 * x * x)))
```
to
```python
@torch.jit.script
def bloom_gelu_forward(x):
    return x * 0.5 * (1.0 + torch.tanh(0.79788456 * x * (1 + 0.044715 * x * x)))
```
This transforms the operations from multiple small element-wise kernels (and hence tensor copies)
to a single kernel operation!
This provided a 10% latency improvement, from 91ms/token to 81ms/token, right there!
Be careful though: this is not some magic black box you can just throw everywhere; the kernel fusion will not necessarily happen, or the previously used operations might already be extremely efficient.
Places where we found it worked well:
- You have a lot of small/elementwise operations
- You have a hotspot with a few hard-to-remove reshapes or copies in general
- When the fusion happens.
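One quick way to check what `torch.jit.script` captured is to print the TorchScript graph; a small sketch (not from the original codebase):

```python
import torch

@torch.jit.script
def fused_gelu(x):
    return x * 0.5 * (1.0 + torch.tanh(0.79788456 * x * (1 + 0.044715 * x * x)))

# The TorchScript IR shows what the fuser gets to work with:
print(fused_gelu.graph)
```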
### Epic fail
We also had some points, during our testing period, where we ended up seeing some consistent 25% lower latency for the Rust server compared to the Python one. This was rather odd, but because it was consistently measured, and because removing kernels had provided a speedup, we were under the impression that maybe dropping the Python overhead could provide a nice boost.
We started a 3-day job to reimplement the necessary parts of `torch.distributed` to get up and running in the Rust world: [nccl-rs](https://github.com/Narsil/nccl-rs). We had the version working, but something was off in the generations compared to the Python counterpart. During the investigation of the issues, we figured out...
**that we had forgotten to remove the profiler in the PyTorch measurements**...
That was the epic fail, because removing it gave us back the 25%, and then both codes ran just as fast. This is what we had initially expected: Python shouldn't be a performance hit, since it's mostly running torch's C++ code. In the end, 3 days is not the end of the world, and it might become useful sometime in the future, but it's still pretty bad.
It is quite common when doing optimizations to make wrong or misrepresentative measurements, which end up being disappointing or even detrimental to the overall product. This is why doing it in small steps and forming expectations about the outcome as soon as possible helps contain that risk.
Another place where we had to be extra careful was the initial forward pass (without past) versus the later forward passes (with past). If you optimize the first one, you're most certainly going to slow down the later ones, which are much more important and account for most of the runtime.
Another pretty common culprit is measuring CPU times rather than actual CUDA times, so you need to call `torch.cuda.synchronize()` when timing runs to be sure that the kernels have completed.
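A minimal timing sketch of that pattern (`model` and `inputs` are placeholders, not names from the original code):

```python
import time

import torch

torch.cuda.synchronize()              # make sure pending kernels are done
start = time.perf_counter()
output = model(**inputs)              # the operation being measured
torch.cuda.synchronize()              # wait for the kernels launched above
elapsed_ms = (time.perf_counter() - start) * 1000
```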
### Custom kernel
So far, we had achieved close to DeepSpeed performance without any custom code outside of PyTorch! Pretty neat. We also didn't have to make any compromise on the flexibility of the runtime batch size!
But given the DeepSpeed experience, we wanted to try and write a custom kernel
to fuse a few operations in the hot path where `torch.jit.script` wasn't able to
do it for us. Essentially the following two lines:
```python
attn_weights = attention_scores.masked_fill_(attention_mask, torch.finfo(attention_scores.dtype).min)
attention_probs = F.softmax(attn_weights, dim=-1, dtype=torch.float32).to(input_dtype)
```
The first masked fill creates a new tensor, which is here only to tell the softmax operator to ignore those values. Also, the softmax needs to be calculated in float32 (for stability), but within a custom kernel we could limit the amount of upcasting necessary, restricting it to the actual sums and accumulators needed.
Code can be found [here](https://github.com/huggingface/transformers/blob/thomas/add_custom_kernels/src/transformers/models/bloom/custom_kernels/fused_bloom_attention_cuda.cu).
Keep in mind we had a single GPU architecture to target so we could focus on this
and we are not experts (yet) at writing kernels, so there could be better ways
to do this.
This custom kernel provided yet another 10% latency improvement, moving down from 81ms/token to 71ms/token, all the while keeping our flexibility.
After that, we investigated and explored other things, like fusing more operators, removing other reshapes, or putting them in other places. But no attempt ever made a significant enough impact to make it into the final versions.
### Webserver part
Just like the Rust counterpart, we had to implement the batching of requests
with different parameters. Since we are in the `PyTorch` world, we have pretty much full control of what's going on.
Since we're in Python, we have the limiting factor that `torch.distributed` needs to run on several processes instead of threads, which means it's slightly harder to communicate between processes. In the end, we opted to communicate raw strings over a Redis pub/sub to distribute the requests to all processes at once. Since we are in different processes, it's easier to do it that way than communicating tensors (which are way bigger), for instance.
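A minimal sketch of that pattern with the `redis` client (the channel name and payload shape are illustrative, not the production code):

```python
import json

import redis

r = redis.Redis()

# Webserver side: publish the raw request once...
def publish_request(text, parameters):
    r.publish("requests", json.dumps({"inputs": text, "parameters": parameters}))

# ...and every torch.distributed process subscribed to the channel picks it up.
def worker_loop(handle_request):
    pubsub = r.pubsub()
    pubsub.subscribe("requests")
    for message in pubsub.listen():
        if message["type"] == "message":
            handle_request(json.loads(message["data"]))
```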
Then we had to drop the use of [generate](https://huggingface.co/docs/transformers/v4.22.2/en/main_classes/text_generation#transformers.generation_utils.GenerationMixin.generate), since it applies the same parameters to all members of the batch, while we actually want to apply a different set of parameters to each. Thankfully, we can reuse lower-level items like the [LogitsProcessor](https://huggingface.co/docs/transformers/internal/generation_utils#transformers.LogitsProcessor) to save us a lot of work.
So we reconstructed a `generate` function that takes a list of parameters
and applies them to each member of the batch.
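A simplified sketch of the idea (sampling-only, with illustrative parameter names; the real server handles more knobs and edge cases):

```python
import torch
from transformers import LogitsProcessorList, TemperatureLogitsWarper, TopPLogitsWarper

def sample_step(model, input_ids, per_request_params):
    # One decoding step for a batch where each row has its own parameters.
    logits = model(input_ids).logits[:, -1, :]
    next_tokens = []
    for i, params in enumerate(per_request_params):
        processors = LogitsProcessorList([
            TemperatureLogitsWarper(params["temperature"]),
            TopPLogitsWarper(params["top_p"]),
        ])
        row = processors(input_ids[i : i + 1], logits[i : i + 1])
        probs = torch.softmax(row, dim=-1)
        next_tokens.append(torch.multinomial(probs, num_samples=1))
    return torch.cat(next_tokens)
```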
Another really important aspect of the final UX is latency. Since we have different parameter sets for different requests, we might have one request for 20 tokens and another for 250 tokens. At 75ms/token latency, one request takes 1.5s and the other 18s. If we were batching all the way, we would make the user who asked for 20 tokens wait the full 18s, making it appear to them as if we were running at 900ms/token, which is quite slow!
Since we're in a PyTorch world with extreme flexibility, what we can do instead is extract the first request from the batch as soon as we have generated its first 20 tokens, and return it to that user within the requested 1.5s! We also happen to save 230 tokens' worth of computation.
So flexibility **is** important to get the best possible latency out there.
## Last notes and crazy ideas
Optimization is a never-ending job, and like in any other project, 20% of the work will usually yield 80% of the results.
At some point, we started having a small testing strategy to figure out the potential yield of an idea, and if the tests didn't show significant results, we discarded it. 1 day for a 10% increase is valuable enough; 2 weeks for 10X is valuable enough; 2 weeks for 10% is not so interesting.
### Have you tried ...?
Stuff we know exists but haven't used, for various reasons: it felt like it wasn't adapted to our use case, it was too much work, the yields weren't promising enough, or we simply had too many options to try and discarded some for no particular reason other than lack of time. The following are in no particular order:
- [Cuda graphs](https://developer.nvidia.com/blog/cuda-graphs/)
- [nvFuser](https://pytorch.org/tutorials/intermediate/nvfuser_intro_tutorial.html) (This is what powers `torch.jit.script` so we did use it.)
- [FasterTransformer](https://github.com/NVIDIA/FasterTransformer)
- [Nvidia's Triton](https://developer.nvidia.com/nvidia-triton-inference-server)
- [XLA](https://www.tensorflow.org/xla) (JAX uses XLA too!)
- [torch.fx](https://pytorch.org/docs/stable/fx.html)
- [TensorRT](https://developer.nvidia.com/blog/accelerating-inference-up-to-6x-faster-in-pytorch-with-torch-tensorrt/)
Please feel free to reach out if your favorite tool is missing from
here or if you think we missed out on something important that could
prove useful!
### [Flash attention](https://github.com/HazyResearch/flash-attention)
We have briefly looked at integrating flash attention, and while it performs extremely well on the first forward pass (without `past_key_values`), it didn't yield as big improvements when running with `past_key_values`. Since we would have needed to adapt it to include the `alibi` tensor in the calculation, we decided not to do the work (at least not yet).
### [OpenAI Triton](https://openai.com/blog/triton/)
[Triton](https://github.com/openai/triton) is a great framework for building custom kernels in Python. We want to use it more, but we haven't so far. We would be eager to see if it performs better than our CUDA kernel. Writing directly in CUDA seemed like the shortest path for our goal when we considered our options for that part.
### Padding and Reshapes
As mentioned throughout this article, every tensor copy has a cost, and another hidden cost of running in production is padding. When two queries come in with very different lengths, you have to pad (use a dummy token) to make them fit a rectangle. This potentially leads to a lot of unnecessary calculations. [More information](https://huggingface.co/docs/transformers/v4.22.2/en/main_classes/pipelines#pipeline-batching).
Ideally, we would be able to *not* do those calculations at all, and never have reshapes.
TensorFlow has the concept of [RaggedTensor](https://www.tensorflow.org/guide/ragged_tensor) and
PyTorch has [Nested tensors](https://pytorch.org/docs/stable/nested.html). Both of these
seem not as streamlined as regular tensors but might enable us to do less computation
which is always a win.
In an ideal world, the entire inference would be written in CUDA or as a pure GPU implementation. Considering the performance improvements yielded when we could fuse operations, this looks desirable. But to what extent this would deliver, we have no idea. If smarter GPU people have ideas, we are listening!
## Acknowledgments
All this work is the result of the collaboration of many HF team members. In no particular
order, [@ThomasWang](https://huggingface.co/TimeRobber) [@stas](https://huggingface.co/stas)
[@Nouamane](https://huggingface.co/nouamanetazi) [@Suraj](https://huggingface.co/valhalla)
[@Sanchit](https://huggingface.co/sanchit-gandhi) [@Patrick](https://huggingface.co/patrickvonplaten)
[@Younes](/ybelkada) [@Sylvain](https://huggingface.co/sgugger)
[@Jeff (Microsoft)](https://github.com/jeffra) [@Reza](https://github.com/RezaYazdaniAminabadi)
And all the [BigScience](https://huggingface.co/bigscience) organization.
---
title: "Synthetic data: save money, time and carbon with open source"
thumbnail: /blog/assets/176_synthetic-data-save-costs/thumbnail.png
authors:
- user: MoritzLaurer
---
# Synthetic data: save money, time and carbon with open source <!-- omit in toc -->
## tl;dr <!-- omit in toc -->
Should you fine-tune your own model or use an LLM API? Creating your own model puts you in full control but requires expertise in data collection, training, and deployment. LLM APIs are much easier to use but force you to send your data to a third party and create costly dependencies on LLM providers. This blog post shows how you can combine the convenience of LLMs with the control and efficiency of customized models.
In a case study on identifying investor sentiment in the news, we show how to use an open-source LLM to create synthetic data to train your customized model in a few steps. Our resulting custom RoBERTa model can analyze a large news corpus for around $2.70 compared to $3061 with GPT4; it emits around 0.12 kg CO2 compared to very roughly 735 to 1100 kg CO2 with GPT4; it has a latency of 0.13 seconds compared to often multiple seconds with GPT4; and it performs on par with GPT4 at identifying investor sentiment (both 94% accuracy and 0.94 F1 macro). We provide [reusable notebooks](https://github.com/MoritzLaurer/synthetic-data-blog/tree/main), which you can apply to your own use cases.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/table_pros_cons.png" alt="table_pros_cons" width=95%>
</p>
## Table of Contents <!-- omit in toc -->
- [1. The problem: There is no data for your use-case](#1-the-problem-there-is-no-data-for-your-use-case)
- [2. The solution: Synthetic data to teach efficient students](#2-the-solution-synthetic-data-to-teach-efficient-students)
- [3. Case study: Monitoring financial sentiment](#3-case-study-monitoring-financial-sentiment)
- [3.1 Prompt an LLM to annotate your data](#31-prompt-an-llm-to-annotate-your-data)
- [3.2 Compare the open-source model to proprietary models](#32-compare-the-open-source-model-to-proprietary-models)
- [3.3 Understand and validate your (synthetic) data](#33-understand-and-validate-your-synthetic-data)
- [3.3 Tune your efficient \& specialized model with AutoTrain](#33-tune-your-efficient--specialized-model-with-autotrain)
- [3.4 Pros and cons of different approaches](#34-pros-and-cons-of-different-approaches)
- [Conclusion](#conclusion)
## 1. The problem: There is no data for your use-case
Imagine your boss asking you to build a sentiment analysis system for your company. You will find 100,000+ datasets on the Hugging Face Hub, ~450 of which have the word “sentiment” in the title, covering sentiment on Twitter, in poems, or in Hebrew. This is great, but if, for example, you work in a financial institution and you need to track sentiment towards the specific brands in your portfolio, none of these datasets are useful for your task. With the millions of tasks companies could tackle with machine learning, it’s unlikely that someone has already collected and published data on the exact use case your company is trying to solve.
Given this lack of task-specific datasets and models, many people turn to general-purpose LLMs. These models are so large and general that they can tackle most tasks out of the box with impressive accuracy. Their easy-to-use APIs eliminate the need for expertise in fine-tuning and deployment. Their main disadvantages are size and control: with hundreds of billions or trillions of parameters, these models are inefficient and only run on compute clusters controlled by a few companies.
## 2. The solution: Synthetic data to teach efficient students
In 2023, one development fundamentally changed the machine-learning landscape: LLMs started reaching parity with human data annotators. There is now ample evidence showing that the best LLMs outperform crowd workers and are reaching parity with experts in creating quality (synthetic) data (e.g. [Zheng et al. 2023](https://arxiv.org/pdf/2306.05685.pdf), [Gilardi et al. 2023](https://arxiv.org/pdf/2303.15056.pdf), [He et al. 2023](https://arxiv.org/pdf/2303.16854.pdf)). It is hard to overstate the importance of this development. The key bottleneck for creating tailored models was the money, time, and expertise required to recruit and coordinate human workers to create tailored training data. With LLMs starting to reach human parity, high-quality annotation labor is now available through APIs; reproducible annotation instructions can be sent as prompts; and synthetic data is returned almost instantaneously with compute as the only bottleneck.
In 2024, this approach will become commercially viable and boost the value of open-source for small and large businesses. For most of 2023, commercial use of LLMs for annotation labor was blocked due to restrictive business terms by LLM API providers. With models like [Mixtral-8x7B-Instruct-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) by [Mistral](https://mistral.ai/), LLM annotation labor and synthetic data now become open for commercial use. [Mixtral performs on par with GPT3.5](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard), and thanks to its Apache 2.0 license, its synthetic data outputs can be used as training data for smaller, specialized models (the “students”) for commercial use-cases. This blog post provides an example of how this will significantly speed up the creation of your own tailored models while drastically reducing long-term inference costs.
## 3. Case study: Monitoring financial sentiment
Imagine you are a developer in a large investment firm tasked with monitoring economic news sentiment toward companies in your investment portfolio. Until recently, you had two main options:
1. You could fine-tune your own model. This requires writing annotation instructions, creating an annotation interface, recruiting (crowd) workers, introducing quality assurance measures to handle low-quality data, fine-tuning a model on this data, and deploying it.
2. Or you could send your data with instructions to an LLM API. You skip fine-tuning and deployment entirely, and you reduce the data analysis process to writing instructions (prompts), which you send to an “LLM annotator” behind an API. In this case, the LLM API is your final inference solution and you use the LLM's outputs directly for your analysis.
Although Option 2 is more expensive at inference time and requires you to send sensitive data to a third party, it is significantly easier to set up than Option 1 and, therefore, used by many developers.
In 2024, synthetic data provides a third option: combining the cost benefits of Option 1 with the ease-of-use of Option 2. Simply put, you can use an LLM (the “teacher”) to annotate a small sample of data for you, and then you fine-tune a smaller, more efficient LM (the “student”) on this data. This approach can be implemented in a few simple steps.
### 3.1 Prompt an LLM to annotate your data
We use the [financial_phrasebank](https://huggingface.co/datasets/financial_phrasebank) sentiment dataset as a running example, but you can adapt the code for any other use case. The financial_phrasebank task is a 3-class classification task, where 16 experts annotated sentences from financial news on Finnish companies as “positive” / “negative” / “neutral” from an investor perspective ([Malo et al. 2013](https://arxiv.org/pdf/1307.5336.pdf)). For example, the dataset contains the sentence “For the last quarter of 2010, Componenta's net sales doubled to EUR131m from EUR76m for the same period a year earlier”, which was categorized as “positive” from an investor perspective by annotators.
We start by installing a few required libraries.
```python
!pip install datasets # for loading the example dataset
!pip install huggingface_hub # for secure token handling
!pip install requests # for making API requests
!pip install scikit-learn # for evaluation metrics
!pip install pandas # for post-processing some data
!pip install tqdm # for progress bars
```
We can then download the example dataset with its expert annotations.
```python
from datasets import load_dataset

dataset = load_dataset("financial_phrasebank", "sentences_allagree", split='train')

# create a new column with the numeric label verbalised as label_text (e.g. "positive" instead of "0")
label_map = {
    i: label_text
    for i, label_text in enumerate(dataset.features["label"].names)
}

def add_label_text(example):
    example["label_text"] = label_map[example["label"]]
    return example

dataset = dataset.map(add_label_text)

print(dataset)
# Dataset({
#     features: ['sentence', 'label', 'label_text'],
#     num_rows: 2264
# })
```
Now we write a short annotation instruction tailored to the `financial_phrasebank` task and format it as an LLM prompt. This prompt is analogous to the instructions you normally provide to crowd workers.
```python
prompt_financial_sentiment = """\
You are a highly qualified expert trained to annotate machine learning training data.
Your task is to analyze the sentiment in the TEXT below from an investor perspective and label it with only one of the three labels:
positive, negative, or neutral.
Base your label decision only on the TEXT and do not speculate e.g. based on prior knowledge about a company.
Do not provide any explanations and only respond with one of the labels as one word: negative, positive, or neutral
Examples:
Text: Operating profit increased, from EUR 7m to 9m compared to the previous reporting period.
Label: positive
Text: The company generated net sales of 11.3 million euro this year.
Label: neutral
Text: Profit before taxes decreased to EUR 14m, compared to EUR 19m in the previous period.
Label: negative
Your TEXT to analyse:
TEXT: {text}
Label: """
```
Before we can pass this prompt to the API, we need to add some formatting to it. Most LLMs today are fine-tuned with a specific chat template. This template consists of special tokens, which enable LLMs to distinguish between the user's instructions, the system prompt, and their own responses in a chat history. Although we are not using the model as a chatbot here, omitting the chat template can still lead to silent performance degradation. You can use the `tokenizer` to add the special tokens of the model's chat template automatically (read more [here](https://huggingface.co/blog/chat-templates)). For our example, we use the `Mixtral-8x7B-Instruct-v0.1` model.
```python
from transformers import AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
chat_financial_sentiment = [{"role": "user", "content": prompt_financial_sentiment}]
prompt_financial_sentiment = tokenizer.apply_chat_template(chat_financial_sentiment, tokenize=False)
# The prompt now includes special tokens: '<s>[INST] You are a highly qualified expert ... [/INST]'
```
The formatted annotation instruction (prompt) can now be passed to the LLM API. We use the free Hugging Face [serverless Inference API](https://huggingface.co/docs/api-inference/index). The API is ideal for testing popular models. Note that you might encounter rate limits if you send too much data to the free API, as it is shared among many users. For larger workloads, we recommend creating a [dedicated Inference Endpoint](https://huggingface.co/docs/inference-endpoints/index). A dedicated Inference Endpoint is essentially your own personal paid API, which you can flexibly turn on and off.
We login with the `huggingface_hub` library to easily and safely handle our API token. Alternatively, you can also define your token as an environment variable (see the [documentation](https://huggingface.co/docs/huggingface_hub/quick-start#authentication)).
```python
# you need a huggingface account and create a token here: https://huggingface.co/settings/tokens
# we can then safely call on the token with huggingface_hub.get_token()
import huggingface_hub
huggingface_hub.login()
```
We then define a simple `generate_text` function for sending our prompt and data to the API.
```python
import os
import requests

# Choose your LLM annotator
# to find available LLMs see: https://huggingface.co/docs/huggingface_hub/main/en/package_reference/inference_client#huggingface_hub.InferenceClient.list_deployed_models
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"

# docs on different parameters: https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task
generation_params = dict(
    top_p=0.90,
    temperature=0.8,
    max_new_tokens=128,
    return_full_text=False,
    use_cache=False,
)

def generate_text(prompt=None, generation_params=None):
    payload = {
        "inputs": prompt,
        "parameters": {**generation_params},
    }
    response = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {huggingface_hub.get_token()}"},
        json=payload,
    )
    return response.json()[0]["generated_text"]
```
As the LLM might not always return the labels in exactly the same harmonized format, we also define a short `clean_output` function, which maps the string output from the LLM to our three possible labels.
```python
import random

labels = ["positive", "negative", "neutral"]
def clean_output(string, random_choice=True):
for category in labels:
if category.lower() in string.lower():
return category
# if the output string cannot be mapped to one of the categories, we either return "FAIL" or choose a random label
if random_choice:
return random.choice(labels)
else:
return "FAIL"
```
We can now send our texts to the LLM for annotation. The code below sends each text to the LLM API and maps the text output to our three clean categories. Note: iterating over each text and sending them to an API separately is inefficient in practice. APIs can process multiple texts simultaneously, and you can significantly speed up your API calls by sending batches of text to the API asynchronously. You can find optimized code in the [reproduction repository](https://github.com/MoritzLaurer/synthetic-data-blog/tree/main) of this blog post.
```python
output_simple = []
for text in dataset["sentence"]:
# add text into the prompt template
prompt_formatted = prompt_financial_sentiment.format(text=text)
# send text to API
output = generate_text(
prompt=prompt_formatted, generation_params=generation_params
)
# clean output
output_cl = clean_output(output, random_choice=True)
output_simple.append(output_cl)
```
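As noted above, production use should batch requests asynchronously. A rough sketch of what that can look like with `aiohttp` follows; this is an illustration only, not the optimized code from the reproduction repository, and it reuses `API_URL`, `generation_params`, and `prompt_financial_sentiment` defined above.
```python
import asyncio
import aiohttp

async def generate_text_async(session, prompt):
    payload = {"inputs": prompt, "parameters": {**generation_params}}
    async with session.post(
        API_URL,
        headers={"Authorization": f"Bearer {huggingface_hub.get_token()}"},
        json=payload,
    ) as response:
        output = await response.json()
        return output[0]["generated_text"]

async def annotate_all(texts, max_concurrency=8):
    # a semaphore keeps the number of in-flight requests under the API's rate limits
    semaphore = asyncio.Semaphore(max_concurrency)
    async with aiohttp.ClientSession() as session:
        async def worker(text):
            async with semaphore:
                prompt = prompt_financial_sentiment.format(text=text)
                return await generate_text_async(session, prompt)
        return await asyncio.gather(*(worker(t) for t in texts))

# output_async = asyncio.run(annotate_all(list(dataset["sentence"])))
```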
Based on this output, we can now calculate metrics to see how accurately the model did the task without being trained on it.
```python
from sklearn.metrics import classification_report
def compute_metrics(label_experts, label_pred):
# classification report gives us both aggregate and per-class metrics
metrics_report = classification_report(
label_experts, label_pred, digits=2, output_dict=True, zero_division='warn'
)
return metrics_report
label_experts = dataset["label_text"]
label_pred = output_simple
metrics = compute_metrics(label_experts, label_pred)
```
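Since `output_dict=True` makes `classification_report` return a nested dictionary, the headline numbers can be read out directly:
```python
print(f"Accuracy: {metrics['accuracy']:.3f}")
print(f"F1 macro: {metrics['macro avg']['f1-score']:.3f}")
```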
Based on the simple prompt, the LLM correctly classified 91.6% of texts (0.916 accuracy and 0.916 F1 macro). That’s pretty good, given that it was not trained to do this specific task.
We can further improve this by using two simple prompting techniques: Chain-of-Thought (CoT) and Self-Consistency (SC). CoT asks the model to first reason about the correct label before making the labeling decision, instead of deciding immediately. SC means sending the same prompt with the same text to the same LLM multiple times. SC effectively gives the LLM multiple attempts per text with different reasoning paths; if the LLM then responds "positive" twice and "neutral" once, we choose the majority ("positive") as the correct label. Here is our updated prompt for CoT and SC:
```python
prompt_financial_sentiment_cot = """\
You are a highly qualified expert trained to annotate machine learning training data.
Your task is to briefly analyze the sentiment in the TEXT below from an investor perspective and then label it with only one of the three labels:
positive, negative, neutral.
Base your label decision only on the TEXT and do not speculate e.g. based on prior knowledge about a company.
You first reason step by step about the correct label and then return your label.
You ALWAYS respond only in the following JSON format: {{"reason": "...", "label": "..."}}
You only respond with one single JSON response.
Examples:
Text: Operating profit increased, from EUR 7m to 9m compared to the previous reporting period.
JSON response: {{"reason": "An increase in operating profit is positive for investors", "label": "positive"}}
Text: The company generated net sales of 11.3 million euro this year.
JSON response: {{"reason": "The text only mentions financials without indication if they are better or worse than before", "label": "neutral"}}
Text: Profit before taxes decreased to EUR 14m, compared to EUR 19m in the previous period.
JSON response: {{"reason": "A decrease in profit is negative for investors", "label": "negative"}}
Your TEXT to analyse:
TEXT: {text}
JSON response: """
# we apply the chat template like above
chat_financial_sentiment_cot = [{"role": "user", "content": prompt_financial_sentiment_cot}]
prompt_financial_sentiment_cot = tokenizer.apply_chat_template(chat_financial_sentiment_cot, tokenize=False)
# The prompt now includes special tokens: '<s>[INST] You are a highly qualified expert ... [/INST]'
```
This is a JSON prompt where we ask the LLM to return a structured JSON string with its “reason” as one key and the “label” as another key. The main advantage of JSON is that we can parse it to a Python dictionary and then extract the “label”. We can also extract the “reason” if we want to understand the reasoning why the LLM chose this label.
The `process_output_cot` function parses the JSON string returned by the LLM and, in case the LLM does not return valid JSON, it tries to identify the label with a simple string match from our `clean_output` function defined above.
```python
import ast
def process_output_cot(output):
try:
output_dic = ast.literal_eval(output)
return output_dic
except Exception as e:
# if json/dict parse fails, do simple search for occurrence of first label term
print(f"Parsing failed for output: {output}, Error: {e}")
output_cl = clean_output(output, random_choice=False)
output_dic = {"reason": "FAIL", "label": output_cl}
return output_dic
```
We can now reuse our `generate_text` function from above with the new prompt, process the JSON Chain-of-Thought output with `process_output_cot` and send each prompt multiple times for Self-Consistency.
```python
from tqdm.auto import tqdm

self_consistency_iterations = 3
output_cot_multiple = []
for _ in range(self_consistency_iterations):
output_lst_step = []
for text in tqdm(dataset["sentence"]):
prompt_formatted = prompt_financial_sentiment_cot.format(text=text)
output = generate_text(
prompt=prompt_formatted, generation_params=generation_params
)
output_dic = process_output_cot(output)
output_lst_step.append(output_dic["label"])
output_cot_multiple.append(output_lst_step)
```
For each text, we now have three attempts by our LLM annotator to identify the correct label with three different reasoning paths. The code below selects the majority label from the three paths.
```python
import pandas as pd
from collections import Counter
def find_majority(row):
# Count occurrences
count = Counter(row)
# Find majority
majority = count.most_common(1)[0]
# Check if it's a real majority or if all labels are equally frequent
if majority[1] > 1:
return majority[0]
else: # in case all labels appear with equal frequency
return random.choice(labels)
df_output = pd.DataFrame(data=output_cot_multiple).T
df_output['label_pred_cot_multiple'] = df_output.apply(find_majority, axis=1)
```
Now, we can compare our improved LLM labels with the expert labels again and calculate metrics.
```python
label_experts = dataset["label_text"]
label_pred_cot_multiple = df_output['label_pred_cot_multiple']
metrics_cot_multiple = compute_metrics(label_experts, label_pred_cot_multiple)
```
CoT and SC increased performance to 94.0% accuracy and 0.94 F1 macro. We have improved performance by giving the model time to think about its label decision and giving it multiple attempts. Note that CoT and SC cost additional compute. We are essentially buying annotation accuracy with compute.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/fig_mixtral.png" alt="fig_mixtral" width=95%>
</p>
We have now created a synthetic training dataset thanks to these simple LLM API calls. We labeled each text by making the LLM try three different reasoning paths before taking the label decision. The result is a set of labels in high agreement with the human experts and a good-quality dataset we can use to train a more efficient and specialized model.
```python
df_train = pd.DataFrame({
"text": dataset["sentence"],
"labels": df_output['label_pred_cot_multiple']
})
df_train.to_csv("df_train.csv")
```
Note that in the [full reproduction script](https://github.com/MoritzLaurer/synthetic-data-blog/tree/main) for this blog post, we also create a test split purely based on the expert annotations to assess the quality of all models. All metrics are always based on this human expert test split.
### 3.2 Compare the open-source model to proprietary models
The main advantage of this data created with the open-source Mixtral model is that the data is fully commercially usable without legal uncertainty. For example, data created with the OpenAI API is subject to the [OpenAI Business Terms](https://openai.com/policies/business-terms), which explicitly prohibit using model outputs for training models that compete with their products and services. The legal value and meaning of these Terms are unclear, but they introduce legal uncertainty for the commercial use of models trained on synthetic data from OpenAI models. Any smaller, efficient model trained on synthetic data could be considered as competing, as it reduces dependency on the API service.
How does the quality of synthetic data compare between Mistral’s open-source `Mixtral-8x7B-Instruct-v0.1` and OpenAI’s GPT3.5 and GPT4? We ran the identical pipeline and prompts explained above with `gpt-3.5-turbo-0613` and `gpt-4-0125-preview` and reported the results in the table below. We see that Mixtral performs better than GPT3.5 and is on par with GPT4 for this task, depending on the prompt type. (We don’t display the results for the newer gpt-3.5-turbo-0125 here because, for some reason, the performance with this model was worse than with the older default gpt-3.5-turbo-0613).
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/fig_mixtral_gpt.png" alt="fig_mixtral_gpt" width=95%>
</p>
Note that this does not mean Mixtral is always better than GPT3.5 and on par with GPT4. GPT4 performs better on several benchmarks. The main message is that open-source models can now create high-quality synthetic data.
### 3.3 Understand and validate your (synthetic) data
What does all this mean in practice? So far, the result is just data annotated by some black-box LLM. Moreover, we could only calculate the metrics above because we have expert-annotated reference data from our example dataset. How can we trust the LLM annotations if we do not have expert annotations in a real-world scenario?
In practice, whatever annotator you use (human annotators or LLMs), you can only trust data you have validated yourself. Instructions/prompts always contain a degree of ambiguity. Even a perfectly intelligent annotator can make mistakes and must make unclear decisions when faced with often ambiguous real-world data.
Fortunately, data validation has become significantly easier over the past years with open-source tools: [Argilla](https://argilla.io/) provides a free interface for validating and cleaning unstructured LLM outputs; [LabelStudio](https://labelstud.io/) enables you to annotate data in many modalities; and [CleanLab](https://cleanlab.ai/) provides an interface for annotating and automatically cleaning structured data; for quick and simple validation, it can also be fine to just annotate in a simple Excel file.
It's essential to spend some time annotating texts to get a feel for the data and its ambiguities. You will quickly learn that the model made some mistakes, but there will also be several examples where the correct label is unclear and some texts where you agree more with the decision of the LLM than with the experts who created the dataset. These mistakes and ambiguities are a normal part of dataset creation. In fact, there are very few real-world tasks where the human expert baseline is 100% agreement. It's an old insight recently "rediscovered" by the machine learning literature that human data is a faulty gold standard ([Krippendorff 2004](https://books.google.de/books/about/Content_Analysis.html?id=q657o3M3C8cC&redir_esc=y), [Hosking et al. 2024](https://arxiv.org/pdf/2309.16349.pdf)).
After less than an hour in the annotation interface, we gained a better understanding of our data and corrected some mistakes. For reproducibility and to demonstrate the quality of purely synthetic data, however, we continue using the uncleaned LLM annotations in the next step.
### 3.4 Tune your efficient & specialized model with AutoTrain
So far, this has been a standard workflow of prompting an LLM through an API and validating the outputs. Now comes the additional step to enable significant resource savings: we fine-tune a smaller, more efficient, and specialized LM on the LLM's synthetic data. This process is also called "distillation", where the output from a larger model (the "teacher") is used to train a smaller model (the “student”). While this sounds fancy, it essentially only means that we take our original `text` from the dataset and treat the predictions from the LLM as our `labels` for fine-tuning. If you have trained a classifier before, you know that these are the only two columns you need to train a classifier with `transformers`, `sklearn`, or any other library.
We use the Hugging Face [AutoTrain](https://huggingface.co/autotrain) solution to make this process even easier. AutoTrain is a no-code interface that enables you to upload a `.csv` file with labeled data, which the service then uses to fine-tune a model for you automatically. This removes the need for coding or in-depth fine-tuning expertise for training your own model.
On the Hugging Face website, we first click on "Spaces" at the top and then "Create new Space". We then select "Docker" > "AutoTrain" and choose a small A10G GPU, which costs $1.05 per hour. The AutoTrain Space will then initialize. We can then upload our synthetic training data and expert test data via the interface and adjust the different fields, as shown in the screenshot below. Once everything is filled in, we can click on "Start Training" and follow the training process in the Space's logs. Training a small RoBERTa-base model (~0.13B parameters) on just 1811 data points is very fast and should not take more than a few minutes. Once training is done, the model is automatically uploaded to your HF profile. The Space stops once training is finished, and the whole process should take at most 15 minutes and cost less than $1.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/autotrain.png" alt="autotrain" width=95%>
</p>
If you want, you can also use AutoTrain entirely locally on your own hardware, see our [documentation](https://huggingface.co/docs/autotrain/index). Advanced users can, of course, always write their own training scripts, but with these default hyperparameters, the results with AutoTrain should be sufficient for many classification tasks.
How well does our resulting fine-tuned ~0.13B parameter RoBERTa-base model perform compared to much larger LLMs? The bar chart below shows that the custom model fine-tuned on 1811 texts achieves 94% accuracy - the same as its teacher Mixtral and GPT4! A small model could never compete with a much larger LLM out-of-the-box, but fine-tuning it on some high-quality data brings it to the same level of performance for the task it is specialized in.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/fig_mixtral_gpt_roberta.png" alt="fig_mixtral_gpt_roberta" width=95%>
</p>
### 3.5 Pros and cons of different approaches
What are the overall pros and cons of the three approaches we discussed in the beginning: (1) manually creating your own data and model, (2) only using an LLM API, or (3) using an LLM API to create synthetic data for a specialized model? The table below displays the trade-offs across different factors and we discuss different metrics based on our example dataset underneath.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/176_synthetic-data-save-costs/table_pros_cons.png" alt="table_pros_cons" width=95%>
</p>
Let's start with task performance. As demonstrated above, the specialized model performs on par with much larger LLMs. The fine-tuned model can only do the one specific task we have trained it to do, but it does this specific task very well. It would be trivial to create more training data to adapt the model to new domains or more complex tasks. Thanks to synthetic data from LLMs, low performance due to lack of specialized data is not a problem anymore.
Second, compute costs and inference speed. The main compute costs in practice will be inference, i.e. running the model after it has been trained. Let's assume that in your production use case, you need to process 1 million sentences in a given time period. Our fine-tuned RoBERTa-base model runs efficiently on a small T4 GPU with 16GB RAM, which costs $0.6 per hour on an [Inference Endpoint](https://ui.endpoints.huggingface.co/). It has a latency of 0.13 seconds and a throughput of 61 sentences per second with `batch_size=8`. This leads to a total cost of $2.7 for processing 1 million sentences.
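The quoted cost follows directly from the measured throughput:
```python
sentences = 1_000_000
throughput = 61   # sentences per second with batch_size=8, as measured above
gpu_price = 0.6   # USD per hour for the T4 Inference Endpoint
hours = sentences / throughput / 3600
print(f"{hours:.2f} GPU hours -> ${hours * gpu_price:.2f}")  # ~4.55 hours -> ~$2.73
```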
With GPT models, we can calculate inference costs by counting tokens. Processing the tokens in 1 million sentences would cost ~$153 with GPT3.5 and ~$3061 with GPT4. The latency and throughput for these models are more complicated to calculate, as they vary throughout the day depending on the current server load. Anyone working with GPT4 knows, however, that latency can often be multiple seconds and that requests are rate-limited. Note that speed is an issue for any LLM (API), including open-source LLMs. Many generative LLMs are simply too large to be fast.
Training compute costs tend to be less relevant, as LLMs can often be used out-of-the-box without fine-tuning, and the fine-tuning costs of smaller models are relatively small (fine-tuning RoBERTa-base costs less than $1). Only in very few cases do you need to invest in pre-training a model from scratch. Training costs can become relevant when fine-tuning a larger generative LLM to specialize it in a specific generative task.
Third, required investments in time and expertise. This is the main strong point of LLM APIs. It is significantly easier to send instructions to an API than to manually collect data, fine-tune a custom model, and deploy it. This is exactly where using an LLM API to create synthetic data becomes important. Creating good training data becomes significantly easier. Fine-tuning and deployment can then be handled by services like AutoTrain and dedicated Inference Endpoints.
Fourth, control. This is probably the main disadvantage of LLM APIs. By design, LLM APIs make you dependent on the LLM API provider. You need to send your sensitive data to someone else’s servers and you cannot control the reliability and speed of your system. Training your own model lets you choose how and where to deploy it.
Lastly, environmental impact. It's very difficult to estimate the energy consumption and CO2 emissions of closed models like GPT4, given the lack of information on model architecture and hardware infrastructure. The [best (yet very rough) estimate](https://towardsdatascience.com/chatgpts-energy-use-per-query-9383b8654487) we could find puts the energy consumption per GPT4 query at around 0.0017 to 0.0026 kWh. This would lead to very roughly 1700 - 2600 kWh for analyzing 1 million sentences. According to the [EPA CO2 equivalence calculator](https://www.epa.gov/energy/greenhouse-gas-equivalencies-calculator), this is equivalent to 0.735 - 1.1 metric tons of CO2, or 1885 - 2883 miles driven by an average car. Note that the actual CO2 emissions can vary widely depending on the energy mix in the LLM's specific compute region. This estimate is much easier to make with our custom model. Analyzing 1 million sentences with the custom model takes around 4.52 hours on a T4 GPU and, on AWS servers in US East N. Virginia, leads to around 0.12 kg of CO2 (see [ML CO2 Impact calculator](https://mlco2.github.io/impact/)). Running a general-purpose LLM like GPT4 with (allegedly) 8x220B parameters is ridiculously inefficient compared to a specialized model with ~0.13B parameters.
## Conclusion
We have shown the enormous benefits of using an LLM to create synthetic data to train a smaller, more efficient model. While this example only treats investor sentiment classification, the same pipeline could be applied to many other tasks, from other classification tasks (e.g. customer intent detection or harmful content detection), to token classification (e.g. named entity recognition or PII detection), or generative tasks (e.g. summarization or question answering).
In 2024, it has never been easier for companies to create their own efficient models, control their own data and infrastructure, reduce CO2 emissions, and save compute costs and time without having to compromise on accuracy.
Now try it out yourself! You can find the full reproduction code for all numbers in this blog post, as well as more efficient asynchronous functions with batching for API calls in the [reproduction repository](https://github.com/MoritzLaurer/synthetic-data-blog/tree/main). We invite you to copy and adapt our code to your use cases!
---
title: SmolVLM - small yet mighty Vision Language Model
thumbnail: /blog/assets/smolvlm/banner.png
authors:
- user: andito
- user: merve
- user: mfarre
- user: eliebak
- user: pcuenq
---
## TLDR
This blog post introduces SmolVLM, a 2B VLM, SOTA for its memory footprint. SmolVLM is small, fast, memory-efficient, and fully open-source. All model checkpoints, VLM datasets, training recipes and tools are released under the Apache 2.0 license.
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/smolvlm_ecosystem.png" width="800" height="auto" alt="Image description">
## What is SmolVLM?
This year has seen a boom in multimodal AI with many large vision language models released. The trends were to initially scale up compute, later scale up the data diversity by generating synthetic data with large models, and, recently, scale down to make these models more efficient. Small open models allow local deployment to browser or edge devices, cut inference costs, and enable user customization. Some notable examples of these models include PaliGemma 3B, moondream2, and Qwen2VL.
In this blog post, we introduce [SmolVLM](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct), a new family of 2B small vision language models that can be used commercially and deployed to smaller local setups, with completely open training pipelines.
We release three models: [SmolVLM-Base](https://huggingface.co/HuggingFaceTB/SmolVLM-Base), which can be used for downstream fine-tuning, [SmolVLM-Synthetic](https://huggingface.co/HuggingFaceTB/SmolVLM-Synthetic), the fine-tuned variant on synthetic data, and [SmolVLM Instruct](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct), the fine-tuned instruction variant, which can be used out of the box for interactive end-user applications.
This release comes with open-source models integrated into transformers, [a demo built on SmolVLM Instruct](https://huggingface.co/spaces/HuggingFaceTB/SmolVLM), and a supervised fine-tuning script. We have used the datasets previously used for Idefics3: [the Cauldron](https://huggingface.co/datasets/HuggingFaceM4/the_cauldron) and [Docmatix](https://huggingface.co/datasets/HuggingFaceM4/Docmatix), which are also fully open-source.
## Table of Contents
- [TLDR](#tldr)
- [What is SmolVLM?](#what-is-smolvlm)
* [Model capabilities](#model-capabilities)
  * [Architecture](#architecture)
- [Performance](#performance)
* [Benchmarks](#benchmarks)
* [Memory](#memory)
* [Throughput](#throughput)
* [Video](#video)
  * [VLMEvalKit integration](#vlmevalkit-integration)
- [Use SmolVLM with Transformers](#use-smolvlm-with-transformers)
- [Training Details](#training-details)
* [Dataset](#dataset)
* [Context extension](#context-extension)
* [Checkpoint selection](#checkpoint-selection)
- [Fine-Tuning](#fine-tuning)
- [Wrapping up](#wrapping-up)
## Model capabilities
<table><thead><tr><th>Input Image</th><th>Input Prompt</th><th>Answer</th></tr></thead><tbody>
<tr>
<td style="vertical-align: middle;"><img src="https://huggingface.co/datasets/huggingface/release-assets/resolve/main/wat_arun.jpg" width="300"/></td>
<td style="vertical-align: middle;">
<strong>I'm planning a visit to this temple, give me travel tips.</strong>
</td>
<td style="vertical-align: middle;">
The Grand Palace is the most famous landmark in Bangkok and it's worth visiting for its stunning architecture and history. Here are some of our favorite things about the palace:
**The Main Building:**
Located at the center of the complex, the main building features intricate tile work with traditional Thai motifs that cover every inch of the structure. This includes patterns such as dragons, mythical creatures, flowers, and geometric shapes. The roofline has an elaborate design featuring multiple layers of tiles adorned with gold accents.
**Towers & Spires:**
There are several towers within the Grand Palace including the Phra Nakhon Si Ayutthaya National Museum Tower which houses exhibits on Thai culture and heritage. Another notable tower is the Wat Yai Chai Mongkol Tower where you can find beautiful murals depicting scenes from Buddhist mythology.
**Architecture Style:**
The architectural style combines elements of Hindu and Khmer styles seen commonly throughout Southeast Asia. You'll notice influences like stupas (dome-shaped structures), lotus flower carvings, and ornate details typical of these regions' religious buildings.
**Visitor Experience:**
While exploring the Grand Palace, visitors should be aware of certain rules due to security measures taken by authorities. These include no photography inside certain areas or using flashbulbs outside. Additionally, there may be queues during peak tourist seasons so plan accordingly if you're traveling here.
### Travel Tips For Visiting The Grand Palace:
* **Best Time To Visit**: The best time to visit would likely be early morning before sunrise when temperatures drop significantly making walking around easier.
* **Transportation**: Public transportation options exist but they might not provide direct access to the Grand Palace itself; consider taking a taxi instead.
* **Food Options**: There aren't many dining facilities available near the Grand Palace yet local street food vendors do offer delicious meals nearby.
* **Clothing Guidelines**: While not mandatory, wearing traditional Thai clothing (sarongs) could add cultural immersion experience. However, shorts/trousers are allowed provided they don't go below knee level.
* **Safety Measures**: Visitors must follow signs indicating restricted zones marked "No Photography" among others. Always carry identification documents just in case.
By following these guidelines, your trip will undoubtedly enhance both your understanding of Thailand’s rich historical tapestry and personal enjoyment!
</td>
</tr>
<tr>
<td style="vertical-align: middle;"><img src="https://huggingface.co/datasets/huggingface/release-assets/resolve/main/weather_events.png" width="300"/></td>
<td style="vertical-align: middle;">
<strong>Where do the severe droughts happen according to this diagram?</strong>
</td>
<td style="vertical-align: middle;">
The severe droughts are happening in eastern and southern Africa.
</td>
</tr>
<tr>
<td style="vertical-align: middle;"><img src="https://huggingface.co/datasets/huggingface/release-assets/resolve/main/invoice.png" width="300"/></td>
<td style="vertical-align: middle;">
<strong>What is the due date and the invoice date?</strong>
</td>
<td style="vertical-align: middle;">
The due date is 26/02/2019, and the invoice date is 11/02/2019.
</td>
</tr>
</table>
## Architecture
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/self_attention_architecture_smolvlm.png" width="800" height="auto" alt="Image description">
For SmolVLM, we closely followed the architecture from Idefics3, to the point that we use the same implementation in transformers. There are, however, a few key differences:
- We replaced Llama 3.1 8B with SmolLM2 1.7B as the language backbone.
- We compress the patched visual information more aggressively, reducing it 9x using the pixel shuffle strategy (sketched below), compared to 4x in Idefics3.
- We use image patches of 384x384 pixels, instead of 364x364, because 384 is divisible by 3, which is necessary for our pixel shuffle strategy to work.
- For this, we change the vision backbone to use shape-optimized SigLIP with patches of 384x384 pixels and inner patches of 14x14.
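As a rough illustration of the pixel shuffle trick (a minimal sketch of the idea, not the exact Idefics3/SmolVLM implementation in transformers): every 3x3 neighborhood of visual tokens is folded into a single token with 9x the channels, so the sequence the language model sees becomes 9x shorter.
```python
import torch

def pixel_shuffle(x: torch.Tensor, ratio: int = 3) -> torch.Tensor:
    # x: (batch, height, width, channels) grid of patch embeddings
    b, h, w, c = x.shape
    x = x.reshape(b, h, w // ratio, c * ratio)                   # fold groups of columns into channels
    x = x.permute(0, 2, 1, 3)
    x = x.reshape(b, w // ratio, h // ratio, c * ratio * ratio)  # fold groups of rows into channels
    return x.permute(0, 2, 1, 3)                                 # (b, h/ratio, w/ratio, c * ratio**2)

tokens = torch.randn(1, 27, 27, 1152)  # SigLIP-SO400M yields a 27x27 grid with hidden size 1152
print(pixel_shuffle(tokens).shape)     # torch.Size([1, 9, 9, 10368]) -> 81 tokens per 384x384 patch
```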
## Performance
### Benchmarks
We present benchmarks for the tasks we mention in training details.
| Model | MMMU (val) | MathVista (testmini) | MMStar (val) | DocVQA (test) | TextVQA (val) | Min GPU RAM required (GB) |
|-------------------|------------|----------------------|--------------|---------------|---------------|---------------------------|
| SmolVLM | 38.8 | 44.6 | 42.1 | 81.6 | 72.7 | 5.02 |
| Qwen2-VL 2B | 41.1 | 47.8 | 47.5 | 90.1 | 79.7 | 13.70 |
| InternVL2 2B | 34.3 | 46.3 | 49.8 | 86.9 | 73.4 | 10.52 |
| PaliGemma 3B 448px| 34.9 | 28.7 | 48.3 | 32.2 | 56.0 | 6.72 |
| moondream2 | 32.4 | 24.3 | 40.3 | 70.5 | 65.2 | 3.87 |
| MiniCPM-V-2 | 38.2 | 39.8 | 39.1 | 71.9 | 74.1 | 7.88 |
| MM1.5 1B | 35.8 | 37.2 | 0.0 | 81.0 | 72.5 | NaN |
### Memory
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/ram_smolvlm.png" width="900" height="auto" alt="Inference GPU memory use for SmolVLM and other models">
SmolVLM provides the best memory usage among the existing suite of vision language models in transformers. This allows it to run efficiently on-device, such as a laptop! You can see above the GPU memory usage in GBs for each model, running inference with one or two input images, and using the same images and text prompts in all tests. SmolVLM's efficiency in image encoding is built into the model. SmolVLM encodes each 384x384 image patch to 81 tokens. This results in SmolVLM encoding our test prompt and a single image in 1.2k tokens, whereas Qwen2-VL uses 16k tokens. This also explains why the memory consumption increases so much for 2 images with Qwen and InternVL. In contrast, the increase is much more moderate for SmolVLM and PaliGemma, which use a similar approach.
### Throughput
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/throughput_smolvlm.png" width="1100" height="auto" alt="Image description">
SmolVLM's tiny memory footprint also implies that it requires far fewer computations to prefill the model and generate. Compared to Qwen2-VL, the prefill throughput is 3.3 to 4.5 times faster, and the generation throughput is 7.5 to 16 times faster.
### Video
Given SmolVLM's long context and the possibility of tweaking the internal frame resizing of the model, we explored its suitability as an accessible option for basic video analysis tasks, particularly when computational resources are limited.
In our evaluation of SmolVLM's video understanding capabilities, we implemented a straightforward [video processing pipeline code](https://github.com/huggingface/smollm/blob/main/inference/smolvlm/SmolVLM_video_inference.py), extracting up to 50 evenly sampled frames from each video while avoiding internal frame resizing.
This simple approach yielded surprisingly competitive results on the CinePile benchmark, with a score of 27.14%, a performance that positions the model between InternVL2 (2B) and Video-LLaVA (7B).
The quantitative results align with our qualitative testing, looking at an example from the FineVideo dataset:
<div><iframe width="100%" height="200" src="https://www.youtube.com/embed/afV0a64FRgM" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe></div>
```
Question: what is shown in the close-up?
Assistant: In the close-up, there are bottles of Heinz tomato ketchup,
honey, soy sauce, and garlic on a kitchen counter.
Question: describe what is happening in the video.
Assistant: The video shows a man cooking in a kitchen.
He is wearing a black shirt and a black apron.
He is standing in front of a counter and is holding a bottle of Heinz tomato ketchup,
a bottle of honey, soy sauce, and a bowl of minced garlic
```
While we see some temporal understanding limitations in the second answer (the cook points to the ingredients one after the other rather than pointing to or holding all of them at the same time), SmolVLM demonstrated great scene understanding and object recognition capabilities.
### VLMEvalKit integration
We integrated SmolVLM with [VLMEvalKit](https://github.com/open-compass/VLMEvalKit) to facilitate easy evaluation across additional benchmarks.
By running the following command, you can evaluate SmolVLM or your fine-tuned SmolVLM model.
```bash
python run.py --data <benchmarks> --model SmolVLM --work-dir <output_directory>
```
For example, to evaluate on the MMMU dev validation set and MathVista mini, storing the results in a folder called smol:
```bash
python run.py --data MMMU_DEV_VAL MathVista_MINI --model SmolVLM --work-dir smol
```
## Use SmolVLM with Transformers
You can easily load SmolVLM using the `Auto` classes in transformers. Under the hood, the model and processor are mapped to the same implementations used for Idefics3.
```python
from transformers import AutoProcessor, AutoModelForVision2Seq
import torch
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
processor = AutoProcessor.from_pretrained("HuggingFaceTB/SmolVLM-Instruct")
model = AutoModelForVision2Seq.from_pretrained("HuggingFaceTB/SmolVLM-Instruct",
torch_dtype=torch.bfloat16,
_attn_implementation="flash_attention_2" if DEVICE == "cuda" else "eager").to(DEVICE)
```
Image and text can be interleaved arbitrarily, and you can pass in multiple images. Here’s how you can use the chat template and pass in the formatted input to the processor.
```python
from PIL import Image
from transformers.image_utils import load_image
# Load images
image1 = load_image("https://huggingface.co/spaces/HuggingFaceTB/SmolVLM/resolve/main/example_images/rococo.jpg")
image2 = load_image("https://huggingface.co/spaces/HuggingFaceTB/SmolVLM/blob/main/example_images/rococo_1.jpg")
# Create input messages
messages = [
{
"role": "user",
"content": [
{"type": "image"},
{"type": "image"},
{"type": "text", "text": "Can you describe the two images?"}
]
},
]
# Prepare inputs
prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
inputs = processor(text=prompt, images=[image1, image2], return_tensors="pt")
inputs = inputs.to(DEVICE)
```
Start generating with preprocessed input and decode the generated output.
```python
# Generate outputs
generated_ids = model.generate(**inputs, max_new_tokens=500)
generated_texts = processor.batch_decode(
generated_ids,
skip_special_tokens=True,
)
print(generated_texts[0])
```
## Training Details
### Dataset
First, we had to train SmolLM2 to extend its context, but we will discuss that in the next subsection. Once we had a long-context SmolLM2, we trained SmolVLM using the same data that we used for Idefics3. Mainly, we used The Cauldron and Docmatix. The full list of datasets we used [can be consulted here](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct/blob/main/smolvlm-data.pdf).
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/mixture_the_cauldron.png" width="1100" height="auto" alt="Image description">
### Context extension
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/training_loss_smolvlm.png" width="1100" height="auto" alt="Image description">
SmolLM2’s pre-training context window is insufficient for VLMs. Images are encoded into many tokens, and we wanted to support multiple images. To address this, we extended it to 16k tokens by increasing the RoPE base value from 10k to 273k, following the guidelines in [“Scaling Laws of RoPE-based Extrapolation”](https://arxiv.org/abs/2310.05209). We fine-tuned the model on a mixture of long- and short-context datasets.
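In transformers terms, the change boils down to two config fields; the snippet below is only a sketch of those knobs (the actual long-context fine-tuning was run with the EasyContext setup linked underneath, and 16384 is an assumed reading of "16k").
```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("HuggingFaceTB/SmolLM2-1.7B")
config.rope_theta = 273_000              # RoPE base, up from the original 10_000
config.max_position_embeddings = 16_384  # the extended "16k" context window (assumed exact value)
```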
For long-context datasets, we used the “books” subset of Dolma (primarily Project Gutenberg) and code documents with 8k+ tokens from The Stack, each contributing 20% to the final mixture. For short-context datasets, we streamlined the original SmolLM2 pre-training mix to include 20% FineWeb-Edu, 20% DCLM, and 20% from our math dataset (to be released soon). The math dataset was upsampled to mitigate a performance drop observed on GSM8k during the context extension process.
All experiments were implemented using the [EasyContext repository](https://github.com/jzhang38/EasyContext).
### Checkpoint selection
For our training run, we saved checkpoints every 25 optimization steps, allowing us to evaluate and potentially recover the model's state at different points in training. This practice is crucial for identifying the optimal model version, as training longer doesn't always guarantee better performance.
We evaluated the performance across multiple vision-language benchmarks, each weighted according to their importance. The core benchmarks included the following:
- General multimodal understanding (MMMU and MMStar), which are the most comprehensive benchmarks.
- Document and text-based visual question answering (DocVQA and TextVQA)
- Mathematical Reasoning (MathVista)
- Diagram understanding (AI2D)
To select the optimal checkpoint, we created a single metric by combining these benchmarks with different manually assigned weights to reflect their relative importance in assessing the model's capabilities. We used this single metric to select the best checkpoint. Generally, the models tended to do great on most benchmarks with more training, but their relative performance on DocVQA would decrease considerably.
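Mechanically, this is just a weighted average over the benchmark scores. The weights below are placeholders, since the manually assigned values are not published:
```python
# placeholder weights for illustration only -- the real ones were assigned manually
weights = {"MMMU": 0.2, "MMStar": 0.2, "DocVQA": 0.25,
           "TextVQA": 0.15, "MathVista": 0.1, "AI2D": 0.1}

def checkpoint_score(results: dict) -> float:
    # combine per-benchmark scores into the single checkpoint-selection metric
    return sum(weight * results[bench] for bench, weight in weights.items())
```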
## Fine-tuning
You can fine-tune SmolVLM using transformers and apply alignment techniques using TRL 🚀
We provide a [notebook](https://github.com/huggingface/smollm/blob/main/finetuning/Smol_VLM_FT.ipynb) to fine-tune it on the VQAv2 dataset, optionally using LoRA, QLoRA or full fine-tuning. In the notebook, you can find some tricks to save even more memory and use a larger batch size, so that SmolVLM fits inside consumer GPUs, like the L4, for training. With a batch size of 4, 8-bit loading with QLoRA, and gradient checkpointing, we can fine-tune on an L4, consuming around 16 GB of VRAM. This makes it possible to fine-tune your SmolVLM using Colab! You can play around with the parameters to find a nice point in the training duration-memory trade-off.
SmolVLM also comes with TRL integration, so you can apply Direct Preference Optimization (DPO) easily through the CLI. Get started by running `pip install trl accelerate peft` and then run the following command to fine-tune on the [RLAIF-V](https://huggingface.co/datasets/HuggingFaceH4/rlaif-v_formatted) dataset:
``` bash
accelerate launch \
--config_file examples/accelerate_configs/multi_gpu.yaml examples/scripts/dpo_vlm.py \
--dataset_name HuggingFaceH4/rlaif-v_formatted \
--model_name_or_path HuggingFaceTB/SmolVLM-Instruct \
--per_device_train_batch_size 8 \
--gradient_accumulation_steps 32 \
--dataset_num_proc 32 \
--output_dir dpo_smolvlm_rlaif-v \
--bf16 --torch_dtype bfloat16 \
--use_peft --lora_target_modules=all-linear
```
The resulting LoRA adapter weights are [SmolVLM-Instruct-DPO](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct-DPO). A detailed tutorial on preference tuning vision-based LLM can be found here: [dpo_vlm](https://huggingface.co/blog/dpo_vlm).
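Assuming the adapter repository follows the standard PEFT layout, loading the DPO adapter on top of the instruct model looks roughly like this:
```python
from transformers import AutoModelForVision2Seq
from peft import PeftModel

base = AutoModelForVision2Seq.from_pretrained("HuggingFaceTB/SmolVLM-Instruct")
model = PeftModel.from_pretrained(base, "HuggingFaceTB/SmolVLM-Instruct-DPO")
```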
## Wrapping Up
We introduced SmolVLM, a fully open, small, and mighty VLM for the community! We also provide tools for the community to use and customize it. We are looking forward to seeing what you will create with SmolVLM.
Below are some resources if you would like to read more about all things related to SmolVLM.
- Start playing with SmolVLM using [this demo](https://huggingface.co/spaces/HuggingFaceTB/SmolVLM).
- Learn how to fine-tune SmolVLM on VQAv2 using [this notebook](https://github.com/huggingface/smollm/blob/main/finetuning/Smol_VLM_FT.ipynb)
- Learn more about [vision language models](https://huggingface.co/blog/vlms)
---
title: "Easily Train Models with H100 GPUs on NVIDIA DGX Cloud"
thumbnail: /blog/assets/train-dgx-cloud/thumbnail.jpg
authors:
- user: philschmid
- user: jeffboudier
- user: rafaelpierrehf
- user: abhishek
---
# Easily Train Models with H100 GPUs on NVIDIA DGX Cloud
Today, we are thrilled to announce the launch of **Train on DGX Cloud**, a new service on the Hugging Face Hub, available to Enterprise Hub organizations. Train on DGX Cloud makes it easy to use open models with the accelerated compute infrastructure of NVIDIA DGX Cloud. Together, we built Train on DGX Cloud so that Enterprise Hub users can easily access the latest NVIDIA H100 Tensor Core GPUs, to fine-tune popular Generative AI models like Llama, Mistral, and Stable Diffusion, in just a few clicks within the [Hugging Face Hub](https://huggingface.co/models).
<div align="center">
<img src="/blog/assets/train-dgx-cloud/thumbnail.jpg" alt="Thumbnail">
</div>
## GPU Poor No More
This new experience expands upon the [strategic partnership we announced last year](https://nvidianews.nvidia.com/news/nvidia-and-hugging-face-to-connect-millions-of-developers-to-generative-ai-supercomputing) to simplify the training and deployment of open Generative AI models on NVIDIA accelerated computing. One of the main problems developers and organizations face is the scarcity of GPU availability, and the time-consuming work of writing, testing, and debugging training scripts for AI models. Train on DGX Cloud offers an easy solution to these challenges, providing instant access to NVIDIA GPUs, starting with H100 on NVIDIA DGX Cloud. In addition, Train on DGX Cloud offers a simple no-code training job creation experience powered by Hugging Face AutoTrain and Hugging Face Spaces.
[Enterprise Hub](https://huggingface.co/enterprise) organizations can give their teams instant access to powerful NVIDIA GPUs, only incurring charges per minute of compute instances used for their training jobs.
_“Train on DGX Cloud is the easiest, fastest, most accessible way to train Generative AI models, combining instant access to powerful GPUs, pay-as-you-go, and no-code training,”_ says Abhishek Thakur, creator of Hugging Face AutoTrain. _“It will be a game changer for data scientists everywhere!”_
_"Today’s launch of Hugging Face Autotrain powered by DGX Cloud represents a noteworthy step toward simplifying AI model training,”_ said Alexis Bjorlin, vice president of DGX Cloud, NVIDIA. _“By integrating NVIDIA’s AI supercomputer in the cloud with Hugging Face’s user-friendly interface, we’re empowering organizations to accelerate their AI innovation."_
## How it works
Training Hugging Face models on NVIDIA DGX Cloud has never been easier. Below you will find a step-by-step tutorial to fine-tune Mistral 7B.
_Note: You need access to an Organization with a [Hugging Face Enterprise](https://huggingface.co/enterprise) subscription to use Train on DGX Cloud_
You can find Train on DGX Cloud on the model page of supported Generative AI models. It currently supports the following model architectures: Llama, Falcon, Mistral, Mixtral, T5, Gemma, Stable Diffusion, and Stable Diffusion XL.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/autotrain-dgx-cloud/01%20model%20card.png" alt="Model Card">
</div>
Open the “Train” menu, and select “NVIDIA DGX Cloud” - this will open an interface where you can select your Enterprise Organization.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/autotrain-dgx-cloud/02%20select%20organization.png" alt="Organization Selection">
</div>
Then, click on “Create new Space”. When using Train on DGX Cloud for the first time, the service will create a new Hugging Face Space within your Organization, so you can use AutoTrain to create training jobs that will be executed on NVIDIA DGX Cloud. When you want to create another training job later, you will automatically be redirected back to the existing AutoTrain Space.
Once in the AutoTrain Space, you can create your training job by configuring the Hardware, Base Model, Task, and Training Parameters.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/autotrain-dgx-cloud/03%20start.png" alt="Create AutoTrain Job">
</div>
For Hardware, you can select NVIDIA H100 GPUs, available in 1x, 2x, 4x and 8x instances, or L40S GPUs (coming soon). The training dataset must be directly uploaded in the “Upload Training File(s)” area. CSV and JSON files are currently supported. Make sure that the column mapping is correct following the example below. For Training Parameters, you can directly edit the JSON configuration on the right side, e.g., changing the number of epochs from 3 to 2.
When everything is set up, you can start your training by clicking “Start Training”. AutoTrain will now validate your dataset, and ask you to confirm the training.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/autotrain-dgx-cloud/04%20success.png" alt="Launched Training Job">
</div>
You can monitor your training by opening the “logs” of the Space.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/autotrain-dgx-cloud/05%20logs.png" alt="Training Logs">
</div>
After your training is complete, your fine-tuned model will be uploaded to a new private repository within your selected namespace on the Hugging Face Hub.
Train on DGX Cloud is available today for all Enterprise Hub Organizations! Give the service a try, and let us know your feedback!
## Pricing for Train on DGX Cloud
Usage of Train on DGX Cloud is billed by the minute of the GPU instances used during your training jobs. Current prices for training jobs are $8.25 per GPU hour for H100 instances, and $2.75 per GPU hour for L40S instances. Usage fees accrue to your Enterprise Hub Organizations’ current monthly billing cycle, once a job is completed. You can check your current and past usage at any time within the billing settings of your Enterprise Hub Organization.
<table>
<tr>
<td>NVIDIA GPU
</td>
<td>GPU Memory
</td>
<td>On-Demand Price/hr
</td>
</tr>
<tr>
<td><a href="https://www.nvidia.com/en-us/data-center/l40/">NVIDIA L40S</a>
</td>
<td>48GB
</td>
<td>$2.75
</td>
</tr>
<tr>
<td><a href="https://www.nvidia.com/de-de/data-center/h100/">NVIDIA H100</a>
</td>
<td>80 GB
</td>
<td>$8.25
</td>
</tr>
</table>
For example, fine-tuning Mistral 7B on 1500 samples on a single NVIDIA L40S takes ~10 minutes and costs ~$0.45.
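That figure follows directly from per-minute billing at the GPU-hour rate:
```python
# ~10 minutes on one L40S at $2.75 per GPU hour
print(f"${10 / 60 * 2.75:.2f}")  # -> about $0.46, in line with the ~$0.45 above
```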
## We’re just getting started
We are excited to collaborate with NVIDIA to democratize accelerated machine learning across open science, open source, and cloud services.
Our collaboration on open science through [BigCode](https://huggingface.co/bigcode) enabled the training of [StarCoder 2 15B](https://huggingface.co/bigcode/starcoder2-15b), a fully open, state-of-the-art code LLM trained on more than 600 languages.
Our collaboration on open source is fueling the new [optimum-nvidia library](https://github.com/huggingface/optimum-nvidia), accelerating the inference of LLMs on the latest NVIDIA GPUs and already achieving 1,200 tokens per second with Llama 2.
Our collaboration on cloud services created Train on DGX Cloud today. We are also working with NVIDIA to optimize inference and make accelerated computing more accessible to the Hugging Face community, leveraging our collaboration on [NVIDIA TensorRT-LLM](https://developer.nvidia.com/blog/optimizing-inference-on-llms-with-tensorrt-llm-now-publicly-available/) and [optimum-nvidia](https://github.com/huggingface/optimum-nvidia). In addition, some of the most popular open models on Hugging Face will be on [NVIDIA NIM microservices](https://developer.nvidia.com/blog/nvidia-nim-offers-optimized-inference-microservices-for-deploying-ai-models-at-scale/), which was announced today at GTC.
For those attending GTC this week, make sure to watch session [S63149](https://www.nvidia.com/gtc/session-catalog/?tab.allsessions=1700692987788001F1cG&search=S63149#/session/1704937870817001eXsB) on Wednesday 3/20, at 3pm PT, where [Jeff](https://huggingface.co/jeffboudier) will guide you through Train on DGX Cloud and more. Also don't miss the next Hugging Cast, where we will give a live demo of Train on DGX Cloud and you can ask questions directly to [Abhishek](https://huggingface.co/abhishek) and [Rafael](https://huggingface.co/rafaelpierrehf) on Thursday, 3/21, at 9am PT / 12pm ET / 17h CET - [watch the recording here](https://www.youtube.com/watch?v=Vp1zZGBUy9o).
---
title: "A guide to setting up your own Hugging Face leaderboard: an end-to-end example with Vectara's hallucination leaderboard"
thumbnail: /blog/assets/leaderboards-on-the-hub/thumbnail.png
authors:
- user: ofermend
guest: true
- user: minseokbae
guest: true
- user: clefourrier
---
# A guide to setting up your own Hugging Face leaderboard: an end-to-end example with Vectara's hallucination leaderboard
Hugging Face’s [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) (originally created by Ed Beeching and Lewis Tunstall, and maintained by Nathan Habib and Clémentine Fourrier) is well known for tracking the performance of open source LLMs, comparing their performance in a variety of tasks, such as [TruthfulQA](https://github.com/sylinrl/TruthfulQA) or [HellaSwag](https://rowanzellers.com/hellaswag/).
This has been of tremendous value to the open-source community, as it provides a way for practitioners to keep track of the best open-source models.
In late 2023, at Vectara we introduced the [Hughes Hallucination Evaluation Model](https://huggingface.co/vectara/hallucination_evaluation_model) (HHEM), an open-source model for measuring the extent to which an LLM [hallucinates](https://vectara.com/glossary-of-llm-terms/#h-hallucinations-aka-llm-hallucinations) (generates text that is nonsensical or unfaithful to the provided source content). Covering both open source models like Llama 2 or Mistral 7B, as well as commercial models like OpenAI’s GPT-4, Anthropic Claude, or Google’s Gemini, this model highlighted the stark differences that currently exist between models in terms of their likelihood to hallucinate.
As we continue to add new models to HHEM, we were looking for an open-source solution to manage and update the HHEM leaderboard.
Quite recently, the Hugging Face leaderboard team released leaderboard templates ([here](https://huggingface.co/demo-leaderboard) and [here](https://huggingface.co/demo-leaderboard-backend)). These are lightweight versions of the Open LLM Leaderboard itself, which are both open-source and simpler to use than the original code.
Today we’re happy to announce the release of the [new HHEM leaderboard](https://huggingface.co/spaces/vectara/leaderboard), powered by the [HF leaderboard template](https://huggingface.co/demo-leaderboard-backend).
## Vectara’s Hughes Hallucination Evaluation Model (HHEM)
The Hughes Hallucination Evaluation Model (HHEM) Leaderboard is dedicated to assessing the frequency of hallucinations in document summaries generated by Large Language Models (LLMs) such as GPT-4, Google Gemini or Meta’s Llama 2. To use it you can follow the instructions [here](https://huggingface.co/vectara/hallucination_evaluation_model).
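At the time of writing, the model card documents usage along the following lines; treat this as a sketch and defer to the card for the current API:
```python
from sentence_transformers import CrossEncoder

model = CrossEncoder("vectara/hallucination_evaluation_model")
scores = model.predict([
    ["A man walks into a bar and buys a drink.",  # source text
     "A bloke swigs alcohol at a pub."],          # generated summary to check
])
# scores near 1 indicate factual consistency; scores near 0 indicate hallucination
```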
By doing an open-source release of this model, we at [Vectara](https://vectara.com) aim to democratize the evaluation of LLM hallucinations, driving awareness to the differences that exist in LLM performance in terms of propensity to hallucinate.
Our initial release of HHEM was a [Huggingface model](https://huggingface.co/vectara/hallucination_evaluation_model) alongside a [Github repository](https://github.com/vectara/hallucination-leaderboard), but we quickly realized that we needed a mechanism to allow new types of models to be evaluated. Using the HF leaderboard code template, we were able to quickly put together a new leaderboard that allows for dynamic updates, and we encourage the LLM community to submit new relevant models for HHEM evaluation.
<br>
<div style="background-color: #e6f9e6; padding: 16px 32px; outline: 2px solid; border-radius: 10px;">
On a meaningful side note to us here at Vectara, the HHEM was named after our peer Simon Hughes, who passed away unexpectedly of natural causes in November 2023; we decided to name it in his honor due to his <a href="https://diginomica.com/final-thoughts-memory-simon-mark-hughes-hallucination-research-pioneer" target="_blank">lasting legacy</a> in this space.
</div>
## Setting up HHEM with the LLM leaderboard template
To set up the Vectara HHEM leaderboard, we had to follow a few steps, adjusting the HF leaderboard template code to our needs:
1. After cloning the space repository to our own organization, we created two associated datasets: “requests” and “results”; these datasets maintain the requests submitted by users for new LLMs to evaluate, and the results of such evaluations, respectively.
2. We populated the results dataset with existing results from the initial launch, and updated the “About” and “Citations” sections.
For a simple leaderboard, where evaluation results are pushed by your backend to the results dataset, that's all you need!
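For instance, a backend can push a finished evaluation into the results dataset with `huggingface_hub`; the repository and file names below are illustrative:
```python
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results/my-model/results.json",  # local results file (illustrative)
    path_in_repo="my-org/my-model/results.json",
    repo_id="vectara/results",                        # illustrative results dataset id
    repo_type="dataset",
)
```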
As our evaluation is more complex, we then customized the source code to fit the needs of the HHEM leaderboard - here are the details:
1. `leaderboard/src/backend/model_operations.py`: This file contains two primary classes - `SummaryGenerator` and `EvaluationModel`.
a. The `SummaryGenerator` generates summaries based on the HHEM private evaluation dataset and calculates metrics like Answer Rate and Average Summary Length.
b. The `EvaluationModel` loads our proprietary Hughes Hallucination Evaluation Model (HHEM) to assess these summaries, yielding metrics such as Factual Consistency Rate and Hallucination Rate.
2. `leaderboard/src/backend/evaluate_model.py`: defines the `Evaluator` class which utilizes both `SummaryGenerator` and `EvaluationModel` to compute and return results in JSON format.
3. `leaderboard/src/backend/run_eval_suite.py`: contains a function `run_evaluation` that leverages `Evaluator` to obtain and upload evaluation results to the `results` dataset mentioned above, causing them to appear in the leaderboard.
4. `leaderboard/main_backend.py`: Manages pending evaluation requests and executes auto evaluations using aforementioned classes and functions. It also includes an option for users to replicate our evaluation results.
The final source code is available in [the Files tab](https://huggingface.co/spaces/vectara/leaderboard/tree/main) of our [HHEM leaderboard repository](https://huggingface.co/spaces/vectara/leaderboard).
With all these changes, we now have the evaluation pipeline ready to go, and easily deployable as a Huggingface Space.
## Summary
The [HHEM](https://huggingface.co/vectara/hallucination_evaluation_model) is a novel classification model that can be used to evaluate the extent to which LLMs hallucinate. Our use of the Hugging Face leaderboard template provided much needed support for a common need for any leaderboard: the ability to manage the submission of new model evaluation requests, and the update of the leaderboard as new results emerge.
Big kudos to the Hugging Face team for making this valuable framework open-source, and supporting the Vectara team in the implementation. We expect this code to be reused by other community members who aim to publish other types of LLM leaderboards.
If you want to contribute to the HHEM with new models, please submit them on the leaderboard - we very much appreciate any suggestions for new models to evaluate.
And if you have any questions about the Hugging Face LLM front-end or Vectara, please feel free to reach out in the [Vectara](https://discuss.vectara.com/) or [Huggingface](https://discuss.huggingface.co/) forums.
| 0 |
0 | hf_public_repos | hf_public_repos/blog/mask2former.md | ---
title: Universal Image Segmentation with Mask2Former and OneFormer
thumbnail: /blog/assets/127_mask2former/thumbnail.png
authors:
- user: nielsr
- user: shivi
- user: adirik
---
# Universal Image Segmentation with Mask2Former and OneFormer
<script async defer src="https://unpkg.com/medium-zoom-element@0/dist/medium-zoom-element.min.js"></script>
**This guide introduces Mask2Former and OneFormer, 2 state-of-the-art neural networks for image segmentation. The models are now available in [`🤗 transformers`](https://huggingface.co/transformers), an open-source library that offers easy-to-use implementations of state-of-the-art models. Along the way, you'll learn about the difference between the various forms of image segmentation.**
## Image segmentation
Image segmentation is the task of identifying different "segments" in an image, like people or cars. More technically, image segmentation is the task of grouping pixels with different semantics. Refer to the Hugging Face [task page](https://huggingface.co/tasks/image-segmentation) for a brief introduction.
Image segmentation can largely be split into 3 subtasks - instance, semantic and panoptic segmentation - with numerous methods and model architectures to perform each subtask.
- **instance segmentation** is the task of identifying different "instances", like individual people, in an image. Instance segmentation is very similar to object detection, except that we'd like to output a set of binary segmentation masks, rather than bounding boxes, with corresponding class labels. Instances are oftentimes also called "objects" or "things". Note that individual instances may overlap.
- **semantic segmentation** is the task of identifying different "semantic categories", like "person" or "sky" of each pixel in an image. Contrary to instance segmentation, no distinction is made between individual instances of a given semantic category; one just likes to come up with a mask for the "person" category, rather than for the individual people for example. Semantic categories which don't have individual instances, like "sky" or "grass", are oftentimes referred to as "stuff", to make the distinction with "things" (great names, huh?). Note that no overlap between semantic categories is possible, as each pixel belongs to one category.
- **panoptic segmentation**, introduced in 2018 by [Kirillov et al.](https://arxiv.org/abs/1801.00868), aims to unify instance and semantic segmentation, by making models simply identify a set of "segments", each with a corresponding binary mask and class label. Segments can be both "things" or "stuff". Unlike in instance segmentation, no overlap between different segments is possible.
The figure below illustrates the difference between the 3 subtasks (taken from [this blog post](https://www.v7labs.com/blog/panoptic-segmentation-guide)).
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/127_mask2former/semantic_vs_semantic_vs_panoptic.png" alt="drawing" width=500>
</p>
Over the last years, researchers have come up with several architectures that were typically very tailored to either instance, semantic or panoptic segmentation. Instance and panoptic segmentation were typically solved by outputting a set of binary masks + corresponding labels per object instance (very similar to object detection, except that one outputs a binary mask instead of a bounding box per instance). This is oftentimes called "binary mask classification". Semantic segmentation on the other hand was typically solved by making models output a single "segmentation map" with one label per pixel. Hence, semantic segmentation was treated as a "per-pixel classification" problem. Popular semantic segmentation models which adopt this paradigm are [SegFormer](https://huggingface.co/docs/transformers/model_doc/segformer), on which we wrote an extensive [blog post](https://huggingface.co/blog/fine-tune-segformer), and [UPerNet](https://huggingface.co/docs/transformers/main/en/model_doc/upernet).
## Universal image segmentation
Luckily, since around 2020, people started to come up with models that can solve all 3 tasks (instance, semantic and panoptic segmentation) with a unified architecture, using the same paradigm. This started with [DETR](https://huggingface.co/docs/transformers/model_doc/detr), which was the first model that solved panoptic segmentation using a "binary mask classification" paradigm, by treating "things" and "stuff" classes in a unified way. The key innovation was to have a Transformer decoder come up with a set of binary masks + classes in a parallel way. This was then improved in the [MaskFormer](https://huggingface.co/docs/transformers/model_doc/maskformer) paper, which showed that the "binary mask classification" paradigm also works really well for semantic segmentation.
[Mask2Former](https://huggingface.co/docs/transformers/main/model_doc/mask2former) extends this to instance segmentation by further improving the neural network architecture. Hence, we've evolved from separate architectures to what researchers now refer to as "universal image segmentation" architectures, capable of solving any image segmentation task. Interestingly, these universal models all adopt the "mask classification" paradigm, discarding the "per-pixel classification" paradigm entirely. A figure illustrating Mask2Former's architecture is depicted below (taken from the [original paper](https://arxiv.org/abs/2112.01527)).
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/model_doc/mask2former_architecture.jpg" alt="drawing" width=500>
</p>
In short, an image is first sent through a backbone (which, in the paper, could be either [ResNet](https://huggingface.co/docs/transformers/model_doc/resnet) or [Swin Transformer](https://huggingface.co/docs/transformers/model_doc/swin)) to get a list of low-resolution feature maps. Next, these feature maps are enhanced using a pixel decoder module to get high-resolution features. Finally, a Transformer decoder takes in a set of queries and transforms them into a set of binary mask and class predictions, conditioned on the pixel decoder's features.
Note that Mask2Former still needs to be trained on each task separately to obtain state-of-the-art results. This has been improved by the [OneFormer](https://arxiv.org/abs/2211.06220) model, which obtains state-of-the-art performance on all 3 tasks by only training on a panoptic version of the dataset (!), by adding a text encoder to condition the model on either "instance", "semantic" or "panoptic" inputs. This model is also as of today [available in 🤗 transformers](https://huggingface.co/docs/transformers/main/en/model_doc/oneformer). It's even more accurate than Mask2Former, but comes with greater latency due to the additional text encoder. See the figure below for an overview of OneFormer. It leverages either Swin Transformer or the new [DiNAT](https://huggingface.co/docs/transformers/model_doc/dinat) model as backbone.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/model_doc/oneformer_architecture.png" alt="drawing" width=500>
</p>
## Inference with Mask2Former and OneFormer in Transformers
Usage of Mask2Former and OneFormer is pretty straightforward, and very similar to their predecessor MaskFormer. Let's instantiate a Mask2Former model from the hub trained on the COCO panoptic dataset, along with its processor. Note that the authors released no less than [30 checkpoints](https://huggingface.co/models?other=mask2former) trained on various datasets.
```
from transformers import AutoImageProcessor, Mask2FormerForUniversalSegmentation
processor = AutoImageProcessor.from_pretrained("facebook/mask2former-swin-base-coco-panoptic")
model = Mask2FormerForUniversalSegmentation.from_pretrained("facebook/mask2former-swin-base-coco-panoptic")
```
Next, let's load the familiar cats image from the COCO dataset, on which we'll perform inference.
```
from PIL import Image
import requests

url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)
image
```
<img src="assets/78_annotated-diffusion/output_cats.jpeg" width="400" />
We prepare the image for the model using the image processor, and forward it through the model.
```
import torch

inputs = processor(image, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)
```
The model outputs a set of binary masks and corresponding class logits. The raw outputs of Mask2Former can be easily postprocessed using the image processor to get the final instance, semantic or panoptic segmentation predictions:
```
prediction = processor.post_process_panoptic_segmentation(outputs, target_sizes=[image.size[::-1]])[0]
print(prediction.keys())
```
<div class="output stream stdout">
Output:
----------------------------------------------------------------------------------------------------
dict_keys(['segmentation', 'segments_info'])
</div>
In panoptic segmentation, the final `prediction` contains 2 things: a `segmentation` map of shape (height, width) where each value encodes the instance ID of a given pixel, as well as a corresponding `segments_info`. The `segments_info` contains more information about the individual segments of the map (such as their class / category ID). Note that Mask2Former outputs binary mask proposals of shape (96, 96) for efficiency and the `target_sizes` argument is used to resize the final mask to the original image size.
Let's visualize the results:
```
from collections import defaultdict
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib import cm

def draw_panoptic_segmentation(segmentation, segments_info):
    # get the used color map
    viridis = cm.get_cmap('viridis', torch.max(segmentation))
    fig, ax = plt.subplots()
    ax.imshow(segmentation)
    instances_counter = defaultdict(int)
    handles = []
    # for each segment, draw its legend
    for segment in segments_info:
        segment_id = segment['id']
        segment_label_id = segment['label_id']
        segment_label = model.config.id2label[segment_label_id]
        label = f"{segment_label}-{instances_counter[segment_label_id]}"
        instances_counter[segment_label_id] += 1
        color = viridis(segment_id)
        handles.append(mpatches.Patch(color=color, label=label))
    ax.legend(handles=handles)

draw_panoptic_segmentation(**prediction)
```
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/127_mask2former/cats_panoptic_result.png" width="400" />
Here, we can see that the model is capable of detecting the individual cats and remotes in the image. Semantic segmentation on the other hand would just create a single mask for the "cat" category.
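If you'd rather obtain a semantic prediction from the same outputs, the image processor exposes a matching post-processing method. A minimal sketch, reusing the `processor`, `outputs` and `image` objects from above:
```
semantic_map = processor.post_process_semantic_segmentation(
    outputs, target_sizes=[image.size[::-1]]
)[0]
# a (height, width) tensor where each value is a semantic category ID
print(semantic_map.shape)
```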
To perform inference with OneFormer, which has an identical API except that it also takes an additional text prompt as input, we refer to the [demo notebook](https://github.com/NielsRogge/Transformers-Tutorials/tree/master/OneFormer).
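For reference, a minimal sketch of OneFormer inference might look as follows; the checkpoint name is just one example of the released OneFormer checkpoints, and `image` and `torch` are reused from above:
```
from transformers import OneFormerProcessor, OneFormerForUniversalSegmentation

processor = OneFormerProcessor.from_pretrained("shi-labs/oneformer_coco_swin_large")
model = OneFormerForUniversalSegmentation.from_pretrained("shi-labs/oneformer_coco_swin_large")

# the additional text input selects the task: "panoptic", "instance" or "semantic"
inputs = processor(images=image, task_inputs=["panoptic"], return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
```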
## Fine-tuning Mask2Former and OneFormer in Transformers
For fine-tuning Mask2Former/OneFormer on a custom dataset for instance, semantic, or panoptic segmentation, check out our [demo notebooks](https://github.com/NielsRogge/Transformers-Tutorials/tree/master/MaskFormer/Fine-tuning). MaskFormer, Mask2Former and OneFormer share a similar API so upgrading from MaskFormer is easy and requires minimal changes.
The demo notebooks make use of `MaskFormerForInstanceSegmentation` to load the model whereas you'll have to switch to using either `Mask2FormerForUniversalSegmentation` or `OneFormerForUniversalSegmentation`. In the case of image processing for Mask2Former, you'll also have to switch to using `Mask2FormerImageProcessor`. You can also load the image processor using the `AutoImageProcessor` class, which automatically takes care of loading the correct processor corresponding to your model. OneFormer on the other hand requires a `OneFormerProcessor`, which prepares the images, along with a text input, for the model.
## Conclusion
That's it! You now know about the difference between instance, semantic and panoptic segmentation, as well as how to use "universal architectures" such as Mask2Former and OneFormer using the [🤗 transformers](https://huggingface.co/transformers) library.
We hope you enjoyed this post and learned something. Feel free to let us know whether you are satisfied with the results when fine-tuning Mask2Former or OneFormer.
If you liked this topic and want to learn more, we recommend the following resources:
- Our demo notebooks for [MaskFormer](https://github.com/NielsRogge/Transformers-Tutorials/blob/master/MaskFormer), [Mask2Former](https://github.com/NielsRogge/Transformers-Tutorials/blob/master/Mask2Former) and [OneFormer](https://github.com/NielsRogge/Transformers-Tutorials/blob/master/OneFormer), which give a broader overview on inference (including visualization) as well as fine-tuning on custom data.
- The live demo Spaces for [Mask2Former](https://huggingface.co/spaces/shivi/mask2former-demo) and [OneFormer](https://huggingface.co/spaces/shi-labs/OneFormer) available on the Hugging Face Hub, which you can use to quickly try out the models on sample inputs of your choice.
| 1 |
0 | hf_public_repos | hf_public_repos/blog/tf_tpu.md | ---
title: "Training a language model with 🤗 Transformers using TensorFlow and TPUs"
thumbnail: /blog/assets/tf_tpu_training/thumbnail.png
authors:
- user: rocketknight1
- user: sayakpaul
---
# Training a language model with 🤗 Transformers using TensorFlow and TPUs
## Introduction
TPU training is a useful skill to have: TPU pods are high-performance and extremely scalable, making it easy to train models at any scale from a few tens of millions of parameters up to truly enormous sizes: Google’s PaLM model (over 500 billion parameters!) was trained entirely on TPU pods.
We’ve previously written a [tutorial](https://huggingface.co/docs/transformers/main/perf_train_tpu_tf) and a [Colab example](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/tpu_training-tf.ipynb) showing small-scale TPU training with TensorFlow and introducing the core concepts you need to understand to get your model working on TPU. This time, we’re going to step that up another level and train a masked language model from scratch using TensorFlow and TPU, including every step from training your tokenizer and preparing your dataset through to the final model training and uploading. This is the kind of task that you’ll probably want a dedicated TPU node (or VM) for, rather than just Colab, and so that’s where we’ll focus.
As in our Colab example, we’re taking advantage of TensorFlow's very clean TPU support via XLA and `TPUStrategy`. We’ll also be benefiting from the fact that the majority of the TensorFlow models in 🤗 Transformers are fully [XLA-compatible](https://huggingface.co/blog/tf-xla-generate). So, surprisingly little work is needed to get them to run on TPU.
Unlike our Colab example, however, this example is designed to be **scalable** and much closer to a realistic training run -- although we only use a BERT-sized model by default, the code could be expanded to a much larger model and a much more powerful TPU pod slice by changing a few configuration options.
## Motivation
Why are we writing this guide now? After all, 🤗 Transformers has had support for TensorFlow for several years now. But getting those models to train on TPUs has been a major pain point for the community. This is because:
- Many models weren’t XLA-compatible
- Data collators didn’t use native TF operations
We think XLA is the future: It’s the core compiler for JAX, it has first-class support in TensorFlow, and you can even use it from [PyTorch](https://github.com/pytorch/xla). As such, we’ve made a [big push](https://blog.tensorflow.org/2022/11/how-hugging-face-improved-text-generation-performance-with-xla.html) to make our codebase XLA compatible and to remove any other roadblocks standing in the way of XLA and TPU compatibility. This means users should be able to train most of our TensorFlow models on TPUs without hassle.
There’s also another important reason to care about TPU training right now: Recent major advances in LLMs and generative AI have created huge public interest in model training, and so it’s become incredibly hard for most people to get access to state-of-the-art GPUs. Knowing how to train on TPU gives you another path to access ultra-high-performance compute hardware, which is much more dignified than losing a bidding war for the last H100 on eBay and then ugly crying at your desk. You deserve better. And speaking from experience: Once you get comfortable with training on TPU, you might not want to go back.
## What to expect
We’re going to train a [RoBERTa](https://huggingface.co/docs/transformers/model_doc/roberta) (base model) from scratch on the [WikiText dataset (v1)](https://huggingface.co/datasets/wikitext). As well as training the model, we’re also going to train the tokenizer, tokenize the data and upload it to Google Cloud Storage in TFRecord format, where it’ll be accessible for TPU training. You can find all the code in [this directory](https://github.com/huggingface/transformers/tree/main/examples/tensorflow/language-modeling-tpu). If you’re a certain kind of person, you can skip the rest of this blog post and just jump straight to the code. If you stick around, though, we’ll take a deeper look at some of the key ideas in the codebase.
Many of the ideas here were also mentioned in our [Colab example](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/tpu_training-tf.ipynb), but we wanted to show users a full end-to-end example that puts it all together and shows it in action, rather than just covering concepts at a high level. The following diagram gives you a pictorial overview of the steps involved in training a language model with 🤗 Transformers using TensorFlow and TPUs:
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/tf_tpu/tf_tpu_steps.png" alt="tf-tpu-training-steps"/><br>
</p>
## Getting the data and training a tokenizer
As mentioned, we used the [WikiText dataset (v1)](https://huggingface.co/datasets/wikitext). You can head over to the [dataset page on the Hugging Face Hub](https://huggingface.co/datasets/wikitext) to explore the dataset.
<p align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/tf_tpu/wikitext_explore.png" alt="dataset-explore"/><br>
</p>
Since the dataset is already available on the Hub in a compatible format, we can easily load and interact with it using 🤗 datasets. However, for this example, since we’re also training a tokenizer from scratch, here’s what we did:
- Loaded the `train` split of the WikiText using 🤗 datasets.
- Leveraged 🤗 tokenizers to train a [Unigram model](https://huggingface.co/course/chapter6/7?fw=pt).
- Uploaded the trained tokenizer on the Hub.
You can find the tokenizer training code [here](https://github.com/huggingface/transformers/tree/main/examples/tensorflow/language-modeling-tpu#training-a-tokenizer) and the tokenizer [here](https://huggingface.co/tf-tpu/unigram-tokenizer-wikitext). This script also allows you to run it with [any compatible dataset](https://huggingface.co/datasets?task_ids=task_ids:language-modeling) from the Hub.
> 💡 It’s easy to use 🤗 datasets to host your text datasets. Refer to [this guide](https://huggingface.co/docs/datasets/create_dataset) to learn more.
## Tokenizing the data and creating TFRecords
Once the tokenizer is trained, we can use it on all the dataset splits (`train`, `validation`, and `test` in this case) and create TFRecord shards out of them. Having the data splits spread across multiple TFRecord shards helps with massively parallel processing, as opposed to having each split in a single TFRecord file.
We tokenize the samples individually. We then take a batch of samples, concatenate them together, and split them into several chunks of a fixed size (128 in our case). We follow this strategy rather than tokenizing a batch of samples with a fixed length to avoid aggressively discarding text content (because of truncation).
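A minimal sketch of this concatenate-then-chunk strategy, written as a function you could apply to batches of tokenized samples (`block_size` and the function name are illustrative; the real implementation lives in the preparation script linked below):
```python
block_size = 128  # chunk size used in this example

def group_texts(examples):
    # Concatenate every tokenized sample in the batch into one long sequence.
    concatenated = {k: sum(examples[k], []) for k in examples.keys()}
    total_length = len(concatenated[list(examples.keys())[0]])
    # Drop the tail so that every chunk has exactly `block_size` tokens.
    total_length = (total_length // block_size) * block_size
    return {
        k: [t[i : i + block_size] for i in range(0, total_length, block_size)]
        for k, t in concatenated.items()
    }
```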
We then take these tokenized samples in batches and serialize those batches as multiple TFRecord shards, where the total dataset length and individual shard size determine the number of shards. Finally, these shards are pushed to a [Google Cloud Storage (GCS) bucket](https://cloud.google.com/storage/docs/json_api/v1/buckets).
If you’re using a TPU node for training, then the data needs to be streamed from a GCS bucket since the node host memory is very small. But for TPU VMs, we can use datasets locally or even attach persistent storage to those VMs. Since TPU nodes are still quite heavily used, we based our example on using a GCS bucket for data storage.
You can see all of this in code in [this script](https://github.com/huggingface/transformers/blob/main/examples/tensorflow/language-modeling-tpu/prepare_tfrecord_shards.py). For convenience, we have also hosted the resultant TFRecord shards in [this repository](https://huggingface.co/datasets/tf-tpu/wikitext-v1-tfrecords) on the Hub.
## Training a model on data in GCS
If you’re familiar with using 🤗 Transformers, then you already know the modeling code:
```python
from transformers import AutoConfig, AutoTokenizer, TFAutoModelForMaskedLM
tokenizer = AutoTokenizer.from_pretrained("tf-tpu/unigram-tokenizer-wikitext")
config = AutoConfig.from_pretrained("roberta-base")
config.vocab_size = tokenizer.vocab_size
model = TFAutoModelForMaskedLM.from_config(config)
```
But since we’re in the TPU territory, we need to perform this initialization under a strategy scope so that it can be distributed across the TPU workers with data-parallel training:
```python
import tensorflow as tf
tpu = tf.distribute.cluster_resolver.TPUClusterResolver(...)
strategy = tf.distribute.TPUStrategy(tpu)
with strategy.scope():
    tokenizer = AutoTokenizer.from_pretrained("tf-tpu/unigram-tokenizer-wikitext")
    config = AutoConfig.from_pretrained("roberta-base")
    config.vocab_size = tokenizer.vocab_size
    model = TFAutoModelForMaskedLM.from_config(config)
```
Similarly, the optimizer also needs to be initialized under the same strategy scope with which the model is going to be further compiled. Going over the full training code isn’t something we want to do in this post, so we welcome you to read it [here](https://github.com/huggingface/transformers/blob/main/examples/tensorflow/language-modeling-tpu/run_mlm.py). Instead, let’s discuss another key piece of the pipeline — a TensorFlow-native data collator, [`DataCollatorForLanguageModeling`](https://huggingface.co/docs/transformers/main_classes/data_collator#transformers.DataCollatorForLanguageModeling).
`DataCollatorForLanguageModeling` is responsible for masking randomly selected tokens from the input sequence and preparing the labels. By default, we return the results from these collators as NumPy arrays. However, many collators also support returning these values as TensorFlow tensors if we specify `return_tensors="tf"`. This was crucial for our data pipeline to be compatible with TPU training.
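As a sketch, constructing such a collator might look like this (the masking probability shown is the library default, included here for clarity):
```python
from transformers import DataCollatorForLanguageModeling

data_collator = DataCollatorForLanguageModeling(
    tokenizer=tokenizer,   # the tokenizer trained earlier
    mlm_probability=0.15,  # fraction of tokens to mask
    return_tensors="tf",   # emit TensorFlow tensors for the tf.data pipeline
)
```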
Thankfully, TensorFlow provides seamless support for reading files from a GCS bucket:
```python
training_records = tf.io.gfile.glob(os.path.join(args.train_dataset, "*.tfrecord"))
```
If `args.train_dataset` contains the `gs://` identifier, TensorFlow will understand that it needs to look into a GCS bucket. Loading locally is as easy as removing the `gs://` identifier. For the rest of the data pipeline-related code, you can refer to [this section](https://github.com/huggingface/transformers/blob/474bf508dfe0d46fc38585a1bb793e5ba74fddfd/examples/tensorflow/language-modeling-tpu/run_mlm.py#L186-#L201) in the training script.
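For orientation, a rough sketch of turning those shard files into a training dataset could look like this (the real pipeline, including feature parsing and collation, is in `run_mlm.py`):
```python
import tensorflow as tf

dataset = tf.data.TFRecordDataset(training_records, num_parallel_reads=tf.data.AUTOTUNE)
dataset = dataset.shuffle(buffer_size=10_000)     # illustrative buffer size
dataset = dataset.batch(64, drop_remainder=True)  # fixed batch shape, as TPUs require static shapes
dataset = dataset.prefetch(tf.data.AUTOTUNE)
```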
Once the datasets have been prepared, the model and the optimizer have been initialized, and the model has been compiled, we can do the community’s favorite - `model.fit()`. For training, we didn’t do extensive hyperparameter tuning. We just trained it for longer with a learning rate of 1e-4. We also leveraged the [`PushToHubCallback`](https://huggingface.co/docs/transformers/main_classes/keras_callbacks#transformers.PushToHubCallback) for model checkpointing and syncing them with the Hub. You can find the hyperparameter details and a trained model here: [https://huggingface.co/tf-tpu/roberta-base-epochs-500-no-wd](https://huggingface.co/tf-tpu/roberta-base-epochs-500-no-wd).
Once the model is trained, running inference with it is as easy as:
```python
from transformers import pipeline
model_id = "tf-tpu/roberta-base-epochs-500-no-wd"
unmasker = pipeline("fill-mask", model=model_id, framework="tf")
unmasker("Goal of my life is to [MASK].")
[{'score': 0.1003185287117958,
'token': 52,
'token_str': 'be',
'sequence': 'Goal of my life is to be.'},
{'score': 0.032648514956235886,
'token': 5,
'token_str': '',
'sequence': 'Goal of my life is to .'},
{'score': 0.02152673341333866,
'token': 138,
'token_str': 'work',
'sequence': 'Goal of my life is to work.'},
{'score': 0.019547373056411743,
'token': 984,
'token_str': 'act',
'sequence': 'Goal of my life is to act.'},
{'score': 0.01939118467271328,
'token': 73,
'token_str': 'have',
'sequence': 'Goal of my life is to have.'}]
```
## Conclusion
If there’s one thing we want to emphasize with this example, it’s that TPU training is **powerful, scalable and easy.** In fact, if you’re already using Transformers models with TF/Keras and streaming data from `tf.data`, you might be shocked at how little work it takes to move your whole training pipeline to TPU. They have a reputation as somewhat arcane, high-end, complex hardware, but they’re quite approachable, and instantiating a large pod slice is definitely easier than keeping multiple GPU servers in sync!
Diversifying the hardware that state-of-the-art models are trained on is going to be critical in the 2020s, especially if the ongoing GPU shortage continues. We hope that this guide will give you the tools you need to power cutting-edge training runs no matter what circumstances you face.
As the great poet GPT-4 once said:
*If you can keep your head when all around you*<br>
*Are losing theirs to GPU droughts,*<br>
*And trust your code, while others doubt you,*<br>
*To train on TPUs, no second thoughts;*<br>
*If you can learn from errors, and proceed,*<br>
*And optimize your aim to reach the sky,*<br>
*Yours is the path to AI mastery,*<br>
*And you'll prevail, my friend, as time goes by.*<br>
Sure, it’s shamelessly ripping off Rudyard Kipling and it has no idea how to pronounce “drought”, but we hope you feel inspired regardless.
| 2 |
0 | hf_public_repos | hf_public_repos/blog/inference-dgx-cloud.md | ---
title: "Serverless Inference with Hugging Face and NVIDIA NIM"
thumbnail: /blog/assets/train-dgx-cloud/thumbnail.jpg
authors:
- user: philschmid
- user: jeffboudier
---
# Serverless Inference with Hugging Face and NVIDIA NIM
Today, we are thrilled to announce the launch of **Hugging Face NVIDIA NIM API (serverless)**, a new service on the Hugging Face Hub, available to Enterprise Hub organizations. This new service makes it easy to use open models with the accelerated compute of the [NVIDIA DGX Cloud](https://www.nvidia.com/en-us/data-center/dgx-cloud) platform for inference serving. We built this solution so that Enterprise Hub users can easily access the latest NVIDIA AI technology in a serverless way to run inference on popular Generative AI models, including Llama and Mistral, using standardized APIs and a few lines of code within the [Hugging Face Hub](https://huggingface.co/models).
<div align="center">
<img src="/blog/assets/train-dgx-cloud/thumbnail.jpg" alt="Thumbnail">
</div>
## Serverless Inference powered by NVIDIA NIM
This new experience builds on our [collaboration with NVIDIA](https://nvidianews.nvidia.com/news/nvidia-and-hugging-face-to-connect-millions-of-developers-to-generative-ai-supercomputing) to simplify the access and use of open Generative AI models on NVIDIA accelerated computing. One of the main challenges developers and organizations face is the upfront cost of infrastructure and the complexity of optimizing inference workloads for LLMs. With Hugging Face NVIDIA NIM API (serverless), we offer an easy solution to these challenges, providing instant access to state-of-the-art open Generative AI models optimized for NVIDIA infrastructure with a simple API for running inference. The pay-as-you-go pricing model ensures that you only pay for the request time you use, making it an economical choice for businesses of all sizes.
NVIDIA NIM API (serverless) complements [Train on DGX Cloud](https://huggingface.co/blog/train-dgx-cloud), an AI training service already available on Hugging Face.
## How it works
Running serverless inference with Hugging Face models has never been easier. Here’s a step-by-step guide to get you started:
_Note: You need access to an Organization with a [Hugging Face Enterprise Hub](https://huggingface.co/enterprise) subscription to run Inference._
Before you begin, ensure you meet the following requirements:
1. You are a member of an Enterprise Hub organization.
2. You have created a fine-grained token for your organization. Follow the steps below to create your token.
### Create a Fine-Grained Token
Fine-grained tokens allow users to create tokens with specific permissions for precise access control to resources and namespaces. First, go to [Hugging Face Access Tokens](https://huggingface.co/settings/tokens), click on “Create new Token”, and select “fine-grained”.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/inference-dgx-cloud/fine-grained-token-1.png" alt="Create Token">
</div>
Enter a “Token name” and select your Enterprise organization in “org permissions” as scope and then click “Create token”. You don’t need to select any additional scopes.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/inference-dgx-cloud/fine-grained-token-2.png" alt="Scope Token">
</div>
Now, make sure to save this token value to authenticate your requests later.
### **Find your NIM**
You can find “NVIDIA NIM API (serverless)” on the model page of supported Generative AI models. You can find all supported models in this [NVIDIA NIM Collection](https://huggingface.co/collections/nvidia/nim-66a3c6fcdcb5bbc6e975b508), and in the Pricing section.
We will use the [`meta-llama/Meta-Llama-3-8B-Instruct`](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) model. Go to the model card, open the “Deploy” menu, and select “NVIDIA NIM API (serverless)” - this will open an interface with pre-generated code snippets for Python, JavaScript, or Curl.
<div align="center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/inference-dgx-cloud/inference-modal.png" alt="inference-modal">
</div>
### **Send your requests**
NVIDIA NIM API (serverless) is standardized on the OpenAI API. This allows you to use the `openai` SDK for inference. Replace `YOUR_FINE_GRAINED_TOKEN_HERE` with your fine-grained token and you are ready to run inference.
```python
from openai import OpenAI

client = OpenAI(
    base_url="https://huggingface.co/api/integrations/dgx/v1",
    api_key="YOUR_FINE_GRAINED_TOKEN_HERE"
)

chat_completion = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3-8B-Instruct",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Count to 500"}
    ],
    stream=True,
    max_tokens=1024
)

# Iterate and print stream
for message in chat_completion:
    print(message.choices[0].delta.content, end='')
```
Congrats! 🎉 You can now start building your Generative AI applications using open models. 🔥
NVIDIA NIM API (serverless) currently only supports the `chat.completions.create` and `models.list` API. We are working on extending this while adding more models. The `models.list` can be used to check which models are currently available for Inference.
```python
models = client.models.list()
for m in models.data:
    print(m.id)
```
## Supported Models and Pricing
Usage of Hugging Face NVIDIA NIM API (serverless) is billed based on the compute time spent per request. We exclusively use NVIDIA H100 Tensor Core GPUs, which are priced at $8.25 per hour. To make per-request pricing easier to understand, we can convert this to a per-second rate.
$8.25 per hour = $0.0023 per second (rounded to 4 decimal places)
The total cost for a request will depend on the model size, the number of GPUs required, and the time taken to process the request. Here's a breakdown of our current model offerings, their GPU requirements, typical response times, and estimated cost per request:
<table>
<tr>
<td><strong>Model ID</strong>
</td>
<td><strong>Number of NVIDIA H100 GPUs</strong>
</td>
<td><strong>Typical Response Time (500 input tokens, 100 output tokens)</strong>
</td>
<td><strong>Estimated Cost per Request</strong>
</td>
</tr>
<tr>
<td>meta-llama/Meta-Llama-3-8B-Instruct
</td>
<td>1
</td>
<td>1 second
</td>
<td>$0.0023
</td>
</tr>
<tr>
<td>meta-llama/Meta-Llama-3-70B-Instruct
</td>
<td>4
</td>
<td>2 seconds
</td>
<td>$0.0184
</td>
</tr>
<tr>
<td>meta-llama/Meta-Llama-3.1-405B-Instruct-FP8
</td>
<td>8
</td>
<td>5 seconds
</td>
<td>$0.0917
</td>
</tr>
</table>
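The estimates above follow directly from multiplying the number of GPUs by the response time and the per-second rate. A quick sanity check (not an official pricing formula; small rounding differences against the table are expected):

```python
PRICE_PER_GPU_SECOND = 0.0023  # $8.25 / 3600, rounded

def estimate_cost(num_gpus: int, seconds: float) -> float:
    return num_gpus * seconds * PRICE_PER_GPU_SECOND

print(round(estimate_cost(1, 1), 4))  # 0.0023 -> Meta-Llama-3-8B-Instruct
print(round(estimate_cost(4, 2), 4))  # 0.0184 -> Meta-Llama-3-70B-Instruct
```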
Usage fees accrue to your Enterprise Hub Organizations’ current monthly billing cycle. You can check your current and past usage at any time within the billing settings of your Enterprise Hub Organization.
**Supported Models**
<table>
<tr>
<td><strong>Model ID</strong>
</td>
<td><strong>Number of H100 GPUs</strong>
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/mistralai/Mixtral-8x22B-Instruct-v0.1">mistralai/Mixtral-8x22B-Instruct-v0.1</a>
</td>
<td>8
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1">mistralai/Mixtral-8x7B-Instruct-v0.1</a>
</td>
<td>2
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3">mistralai/Mistral-7B-Instruct-v0.3</a>
</td>
<td>2
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/meta-llama/Meta-Llama-3.1-70B-Instruct">meta-llama/Meta-Llama-3.1-70B-Instruct</a>
</td>
<td>4
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/meta-llama/Meta-Llama-3.1-8B-Instruct">meta-llama/Meta-Llama-3.1-8B-Instruct</a>
</td>
<td>1
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct">meta-llama/Meta-Llama-3-8B-Instruct</a>
</td>
<td>1
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct">meta-llama/Meta-Llama-3-70B-Instruct</a>
</td>
<td>4
</td>
</tr>
<tr>
<td><a href="https://huggingface.co/meta-llama/Meta-Llama-3.1-405B-Instruct-FP8">meta-llama/Meta-Llama-3.1-405B-Instruct-FP8</a>
</td>
<td>8
</td>
</tr>
</table>
## Accelerating AI Inference with NVIDIA TensorRT-LLM
We are excited to continue our collaboration with NVIDIA to push the boundaries of AI inference performance and accessibility. A key focus of our ongoing efforts is the integration of the NVIDIA TensorRT-LLM library into Hugging Face's Text Generation Inference (TGI) framework.
We'll be sharing more details, benchmarks, and best practices for using TGI with NVIDIA TensorRT-LLM in the near future. Stay tuned for more exciting developments as we continue to expand our collaboration with NVIDIA and bring more powerful AI capabilities to developers and organizations worldwide!
| 3 |
0 | hf_public_repos | hf_public_repos/blog/writer-case-study.md | ---
title: "Leveraging Hugging Face for complex generative AI use cases"
thumbnail: /blog/assets/78_ml_director_insights/writer.png
authors:
- user: jeffboudier
- user: wassemgtk
guest: true
---
# Leveraging Hugging Face for complex generative AI use cases
In this conversation, Jeff Boudier asks Waseem Alshikh, Co-founder and CTO of Writer, about their journey from a Hugging Face user, to a customer and now an open source model contributor.
- why was Writer started?
- what are the biggest misconceptions in Generative AI today?
- why is Writer now contributing open source models?
- what has been the value of the Hugging Face Expert Acceleration Program service for Writer?
- how is Writer approaching production on CPU and GPU to serve LLMs at scale?
- how important is efficiency and using CPUs for production?
<iframe width="100%" style="aspect-ratio: 16 / 9;" src="https://www.youtube-nocookie.com/embed/t8Ek1aOtaQw" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
_If you’re interested in Hugging Face Expert Acceleration Program for your company, please contact us [here](https://huggingface.co/support#form) - our team will contact you to discuss your requirements!_
| 4 |
0 | hf_public_repos | hf_public_repos/blog/ethics-diffusers.md | ---
title: "Ethical Guidelines for developing the Diffusers library"
thumbnail: /blog/assets/ethics-diffusers/thumbnail.png
authors:
- user: giadap
---
# Ethical guidelines for developing the Diffusers library
We are on a journey to make our libraries more responsible, one commit at a time!
As part of the [Diffusers library documentation](https://huggingface.co/docs/diffusers/main/en/index), we are proud to announce the publication of an [ethical framework](https://huggingface.co/docs/diffusers/main/en/conceptual/ethical_guidelines).
Given diffusion models' real-world applications and potential negative impacts on society, this initiative aims to guide the technical decisions of the Diffusers library maintainers about community contributions. We wish to be transparent in how we make decisions, and above all, we aim to clarify what values guide those decisions.
We see ethics as a process that leverages guiding values, concrete actions, and continuous adaptation. For this reason, we are committed to adjusting our guidelines over time, following the evolution of the Diffusers project and the valuable feedback from the community that keeps it alive.
## Ethical guidelines
* **Transparency**: we are committed to being transparent in managing PRs, explaining our choices to users, and making technical decisions.
* **Consistency**: we are committed to guaranteeing our users the same level of attention in project management, keeping it technically stable and consistent.
* **Simplicity**: with a desire to make it easy to use and exploit the Diffusers library, we are committed to keeping the project’s goals lean and coherent.
* **Accessibility**: the Diffusers project helps lower the entry bar for contributors who can help run it even without technical expertise. Doing so makes research artifacts more accessible to the community.
* **Reproducibility**: we aim to be transparent about the reproducibility of upstream code, models, and datasets when made available through the Diffusers library.
* **Responsibility**: as a community and through teamwork, we hold a collective responsibility to our users by anticipating and mitigating this technology’s potential risks and dangers.
## Safety features and mechanisms
In addition, we provide a non-exhaustive - and hopefully continuously expanding! - list of safety features and mechanisms implemented by the Hugging Face team and the broader community.
* **[Community tab](https://huggingface.co/docs/hub/repositories-pull-requests-discussions)**: it enables the community to discuss and better collaborate on a project.
* **Tag feature**: authors of a repository can tag their content as being “Not For All Eyes”
* **Bias exploration and evaluation**: the Hugging Face team provides a [Space](https://huggingface.co/spaces/society-ethics/DiffusionBiasExplorer) to demonstrate the biases in Stable Diffusion and DALL-E interactively. In this sense, we support and encourage bias explorers and evaluations.
* **Encouraging safety in deployment**
* **[Safe Stable Diffusion](https://huggingface.co/docs/diffusers/main/en/api/pipelines/stable_diffusion_safe)**: It mitigates the well-known issue that models, like Stable Diffusion, that are trained on unfiltered, web-crawled datasets tend to suffer from inappropriate degeneration. Related paper: [Safe Latent Diffusion: Mitigating Inappropriate Degeneration in Diffusion Models](https://arxiv.org/abs/2211.05105).
* **Staged released on the Hub**: in particularly sensitive situations, access to some repositories should be restricted. This staged release is an intermediary step that allows the repository’s authors to have more control over its use.
* **Licensing**: [OpenRAILs](https://huggingface.co/blog/open_rail), a new type of licensing, allow us to ensure free access while having a set of restrictions that ensure more responsible use. | 5 |
0 | hf_public_repos | hf_public_repos/blog/dialog-agents.md | ---
title: "What Makes a Dialog Agent Useful?"
thumbnail: /blog/assets/dialog-agents/thumbnail.png
authors:
- user: nazneen
- user: natolambert
- user: VictorSanh
- user: ThomWolf
---
# What Makes a Dialog Agent Useful?
## The techniques behind ChatGPT: RLHF, IFT, CoT, Red teaming, and more
_This article has been translated to Chinese [简体中文](https://mp.weixin.qq.com/s/Xd5VtRP-ziH-PYFOci65Hg)_.
A few weeks ago, ChatGPT emerged and launched the public discourse into a set of obscure acronyms: RLHF, SFT, IFT, CoT, and more, all attributed to the success of ChatGPT. What are these obscure acronyms and why are they so important? We surveyed all the important papers on these topics to categorize these works, summarize takeaways from what has been done, and share what remains to be shown.
Let’s start by looking at the landscape of language model based conversational agents. ChatGPT is not the first, in fact many organizations published their language model dialog agents before OpenAI, including [Meta’s BlenderBot](https://arxiv.org/abs/2208.03188), [Google’s LaMDA](https://arxiv.org/abs/2201.08239), [DeepMind’s Sparrow](https://arxiv.org/abs/2209.14375), and [Anthropic’s Assistant](https://arxiv.org/abs/2204.05862) (_a continued development of this agent without perfect attribution is also known as Claude_). Some groups have also announced their plans to build an open-source chatbot and publicly shared a roadmap ([LAION’s Open Assistant](https://github.com/LAION-AI/Open-Assistant)); others surely are doing so and have not announced it.
The following table compares these AI chatbots based on the details of their public access, training data, model architecture, and evaluation directions. ChatGPT is not documented so we instead share details about InstructGPT, which is an instruction fine-tuned model from OpenAI that is believed to have served as a foundation of ChatGPT.
| | LaMDA | BlenderBot 3 |Sparrow | ChatGPT/ InstructGPT | Assistant|
| --- | --- | --- | --- | --- | --- |
| **Org** | Google | Meta | DeepMind | OpenAI | Anthropic |
| **Access** | Closed | Open | Closed | Limited | Closed |
| **Size** | 137B | 175B | 70B | 175B | 52B |
| **Pre-trained<br>Base model** | Unknown | OPT | Chinchilla | GPT-3.5 | Unknown |
| **Pre-training corpora size** (# tokens) | 2.81T | 180B | 1.4T | Unknown | 400B |
| **Model can<br>access the web** | ✔ | ✔ | ✔ | ✖️ | ✖️ |
| **Supervised<br>fine-tuning** | ✔ | ✔ | ✔ | ✔ | ✔ |
| **Fine-tuning<br>data size** | Quality:6.4K<br>Safety: 8K<br>Groundedness: 4K<br>IR: 49K | 20 NLP datasets ranging from 18K to 1.2M | Unknown | 12.7K (for InstructGPT, likely much more for ChatGPT) | 150K + LM generated data |
| **RLHF** | ✖️ | ✖️ | ✔ | ✔ | ✔ |
| **Hand written rules for safety** | ✔ | ✖️ | ✔ | ✖️ | ✔ |
| **Evaluation criteria** | 1. Quality (sensibleness, specificity, interestingness)<br>2. Safety (includes bias) 3. Groundedness | 1, Quality (engagingness, use of knowledge)<br>2. Safety (toxicity, bias) | 1. Alignment (Helpful, Harmless, Correct)<br>2. Evidence (from web)<br>3. Rule violation<br>4. Bias and stereotypes<br>5. Trustworthiness | 1. Alignment (Helpful, Harmless, Truthfulness)<br>2. Bias | 1. Alignment (Helpful, Harmless, Honesty)<br>2. Bias |
| **Crowdsourcing platform used for data labeling**| U.S. based vendor | Amazon MTurk | Unknown | Upwork and Scale AI | Surge AI, Amazon MTurk, and Upwork |
We observe that although there are many differences in the training data, model, and fine-tuning, there are also some commonalities. One common goal for all the above chatbots is *instruction following*, i.e., to follow user-specified instructions. For example, instructing ChatGPT to write a poem on fine-tuning.

### From predicting text to following instructions
Usually, the language-modeling objective of the base model is not sufficient for a model to learn to follow a user’s direction in a helpful way. Model creators use **Instruction Fine-Tuning (IFT)**, which involves fine-tuning the base model on demonstrations of written directions on a very diverse set of tasks, in addition to classical NLP tasks of sentiment, text classification, summarization, etc. These instruction demonstrations are made up of three main components — the instruction, the inputs, and the outputs. The inputs are optional; some tasks only require instructions, such as open-ended generation as in the example above with ChatGPT. An input and an output, when present, form an *instance*. There can be multiple instances of inputs and outputs for a given instruction. See below for examples (taken from [Wang et al., ‘22]).

Data for IFT is usually a collection of human-written instructions and instances of instructions bootstrapped using a language model. For bootstrapping, the LM is prompted (as in the figure above) in a few-shot setting with examples and instructed to generate new instructions, inputs, and outputs. In each round, the model is prompted with samples chosen from both human-written and model-generated data. The amount of human and model contributions to creating the dataset is a spectrum; see the figure below.

On one end is the purely model-generated IFT dataset such as Unnatural Instructions ([Honovich et al., ‘22](https://arxiv.org/abs/2212.09689)) and on the other is a large community effort of hand-crafted instructions as in Super-natural instructions ([Wang et al., ‘22](https://arxiv.org/abs/2204.07705)). In between these two are works on using a small set of high quality seed dataset followed by bootstrapping such as Self-instruct ([Wang et al., 22](https://arxiv.org/pdf/2212.10560.pdf)). Yet another way of collating a dataset for IFT is to take the existing high-quality crowdsourced NLP datasets on various tasks (including prompting) and cast those as instructions using a unified schema or diverse templates. This line of work includes the T0 ([Sanh et al., ‘22](https://arxiv.org/pdf/2110.08207.pdf)), Natural instructions dataset ([Mishra et al., ‘22](https://arxiv.org/pdf/2104.08773.pdf)), the FLAN LM ([Wei et al., ‘22](https://arxiv.org/pdf/2109.01652.pdf)), and the OPT-IML ([Iyer et al.,’22](https://arxiv.org/pdf/2212.12017.pdf)).
### Safely following instructions
Instruction fine-tuned LMs, however, may not always generate responses that are **helpful** and **safe**. Examples of this kind of behavior include being evasive by always giving an unhelpful response such as “I’m sorry, I don’t understand.” or generating an unsafe response to user inputs on a sensitive topic. To alleviate such behavior, model developers use **Supervised Fine-tuning (SFT)**, fine-tuning the base language model on high-quality human-annotated data for helpfulness and harmlessness. For example, see the table below, taken from the Sparrow paper (Appendix F).
SFT and IFT are very closely linked. Instruction tuning can be seen as a subset of supervised fine-tuning. In the recent literature, the SFT phase has often been utilized for safety topics, rather than instruction-specific topics, which is done after IFT. In the future, this taxonomy and delineation should mature into clearer use-cases and methodology.

Google’s LaMDA is also fine-tuned on a dialog dataset with safety annotations based on a set of rules (Appendix A). These rules are usually pre-defined and developed by model creators and encompass a wide set of topics including harm, discrimination, misinformation.
### Fine-tuning the models
On the other hand, OpenAI’s InstructGPT, DeepMind’s Sparrow, and Anthropic’s Constitutional AI use human annotations of preferences in a setup called **reinforcement learning from human feedback (RLHF)**. In RLHF, a set of model responses is ranked based on human feedback (e.g. choosing a text blurb that is preferred over another). Next, a preference model is trained on those annotated responses to return a scalar reward for the RL optimizer. Finally, the dialog agent is trained to simulate the preference model via reinforcement learning. See our previous [blog post](https://huggingface.co/blog/rlhf) on RLHF for more details.
**Chain-of-thought (CoT)** prompting ([Wei et al., ‘22](https://arxiv.org/abs/2201.11903)) is a special case of instruction demonstration that generates output by eliciting step-by-step reasoning from the dialog agent. Models fine-tuned with CoT use instruction datasets with human annotations of step-by-step reasoning. It’s the origin of the famous prompt, *[let’s think step by step](https://arxiv.org/abs/2205.11916)*. The example below is taken from [Chung et al., ‘22](https://arxiv.org/pdf/2210.11416.pdf). The orange color highlights the instruction, the pink color shows the input and the output, and the blue color is the CoT reasoning.

Models fine-tuned with CoT have been shown to perform much better on tasks involving commonsense, arithmetic, and symbolic reasoning, as in [Chung et al., ‘22](https://arxiv.org/pdf/2210.11416.pdf).
CoT fine-tuning has also been shown to be very effective for harmlessness (sometimes doing better than RLHF) without the model being evasive and generating “Sorry, I cannot respond to this question,” for prompts that are sensitive, as shown by [Bai et al., ’22](https://www.anthropic.com/constitutional.pdf). See Appendix D of their paper for more examples.

## Takeaways:
1. You only need a very tiny fraction of data for instruction fine-tuning (on the order of a few hundred examples) compared to the pre-training data.
2. Supervised fine-tuning uses human annotations to make model outputs safer and helpful.
3. CoT fine-tuning improves model performance on tasks requiring step-by-step thinking and makes them less evasive on sensitive topics.
## Next steps for dialogue agents
This blog summarizes many of the existing work on what makes a dialog agent useful. But there are still many open questions yet to be explored. We list some of them here.
1. How important is RL in learning from human feedback? Can we get the performance of RLHF with training on higher quality data in IFT or SFT?
2. How does SFT+ RLHF as in Sparrow compare to just using SFT as in LaMDA for safety?
3. How much pre-training is necessary, given that we have IFT, SFT, CoT, and RLHF? What are the tradeoffs? What are the best base models people should use (both those publicly available, and not)?
4. Many of the models referenced in this paper have been carefully engineered with [red-teaming](https://arxiv.org/abs/2209.07858), where engineers specifically search for failure modes and influence future training (prompts and methods) based on unveiled issues. How do we systematically record the effects of these methods and reproduce them?
PS: Please let us know if you find any information in this blog missing or incorrect.
**Citation**
`Rajani et al., "What Makes a Dialog Agent Useful?", Hugging Face Blog, 2023.`
BibTeX citation:
```
@article{rajani2023ift,
author = {Rajani, Nazneen and Lambert, Nathan and Sanh, Victor and Wolf, Thomas},
title = {What Makes a Dialog Agent Useful?},
journal = {Hugging Face Blog},
year = {2023},
note = {https://huggingface.co/blog/dialog-agents},
}
```
| 6 |
0 | hf_public_repos | hf_public_repos/blog/universal_assisted_generation.md | ---
title: "Universal Assisted Generation: Faster Decoding with Any Assistant Model"
thumbnail: /blog/assets/optimum_intel/intel_thumbnail.png
authors:
- user: danielkorat
guest: true
org: Intel
- user: orenpereg
guest: true
org: Intel
- user: mber
guest: true
org: Intel
- user: jmamou
guest: true
org: Intel
- user: joaogante
- user: lewtun
- user: Nadav-Timor
guest: true
org: weizmannscience
- user: moshew
guest: true
org: Intel
---
# Universal Assisted Generation: Faster Decoding with Any Assistant Model
<em>TL;DR</em>: Many LLMs such as `gemma-2-9b` and `Mixtral-8x22B-Instruct-v0.1` lack a much smaller version to use for [assisted generation](https://huggingface.co/blog/assisted-generation). In this blog post, we present _Universal Assisted Generation_: a method developed by Intel Labs and Hugging Face which extends assisted generation to work with a small language model **from any model family** 🤯. As a result, it is now possible to accelerate inference from _any_ decoder or [Mixture of Experts](https://huggingface.co/blog/moe) model by **1.5x-2.0x** with almost zero overhead 🔥🔥🔥. Let's dive in!
## Introduction
Nowadays, the strongest open weight LLMs typically have billions to hundreds of billions parameters (hello Llama-3.1-405B 👋), and deploying these beasts in production environments poses a range of engineering challenges. One such challenge is that generating text from these large models is _slow_, which has prompted the community to develop a wide range of techniques to accelerate the decoding process. Assisted generation, also known as [speculative decoding](https://arxiv.org/abs/2211.17192), is a very popular and practical approach for accelerating LLM inference without accuracy loss. In this blog post, we take a look at how assisted generation works and share our research to extend it towards _any_ of the [140,000 language models](https://huggingface.co/models?pipeline_tag=text-generation&sort=trending) on the Hugging Face Hub 🚀!
## Assisted Generation
The core idea behind assisted generation involves using a pair of models, referred to as the _target_ and _assistant_ models. The assistant model is a smaller, more efficient version of the target model, for example you can use [`Llama-3.2-1B`](https://huggingface.co/meta-llama/Llama-3.2-1B) as the assistant model for the larger [`Llama-3.1-70b`](https://huggingface.co/meta-llama/Llama-3.1-70b) target model.
Assisted generation is an iterative process. Each cycle, the assistant model generates a sequence of tokens autoregressively, one at a time. The target model then verifies all the assistant tokens in the sequence in a single forward pass. The speedup is achieved by confirming multiple tokens in each forward pass of the target model, rather than producing just one token at a time. For a more detailed explanation, see the original [blog post](https://huggingface.co/blog/assisted-generation). Combined with the recently introduced [Dynamic Speculation](https://huggingface.co/blog/dynamic_speculation_lookahead) strategy, assisted generation accelerates text generation by 1.5x-3x, depending on the task and the models used.
The remarkable speedups offered by assisted generation come with a significant drawback: the target and assistant models must share the same tokenizer, meaning they need to be from the same model family. However, many widely-used models lack smaller versions that are both compact and accurate enough to deliver substantial latency reductions. Based on our experience, meaningful speedups are typically seen when the assistant model is at least 50-100 times smaller than the target one. For instance, [`CodeLlama-13b`](https://huggingface.co/meta-llama/CodeLlama-13b-Instruct-hf) lacks a smaller version, and [`gemma-2-9b`](https://huggingface.co/google/gemma-2-9b) only has a `2b` variant which is still not sufficiently small/fast to achieve significant performance improvements.
## Universal Assisted Generation
In order to mitigate this pain point, Intel Labs, together with our friends at Hugging Face, has developed Universal Assisted Generation (UAG). UAG enables selecting any pair of target and assistant models regardless of their tokenizer. For example, `gemma-2-9b` can be used as the target model, with the tiny [`vicuna-68m`](https://huggingface.co/double7/vicuna-68m) as the assistant.
The main idea behind the method we propose is 2-way tokenizer translation. Once the assistant model completes a generation iteration, the assistant tokens are converted to text, which is then tokenized using the target model's tokenizer to generate target tokens. After the verification step, the target tokens are similarly converted back into assistant tokens, which are then appended to the assistant model's context before the next iteration begins.
Since the assistant and target tokenizers use different vocabularies, it's necessary to handle the discrepancies between them. To accurately re-encode the newly generated assistant tokens, it’s essential to prepend a context window consisting of several previous tokens. This entire sequence is then re-encoded into the target token format and aligned with the most recent target tokens to pinpoint the exact location where the newly generated tokens should be appended. This process is illustrated in the video below.
<!-- [GIF 1 -- FWD PASS] -->
<figure class="image table text-center m-0 w-full">
<video
style="max-width: 80%; margin: auto;"
autoplay loop muted playsinline
src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/universal-assisted-generation/method-animation.mov"
></video>
</figure>
While not shown in the video above, token re-encoding from target to assistant follows a similar process. However, mismatched tokens must be discarded from the assistant model's key-value (KV) cache to ensure data integrity.
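To make the mechanism concrete, here is a rough sketch of the assistant-to-target re-encoding step. This only illustrates the idea and is not the actual implementation inside 🤗 Transformers; the function name and the fixed context window size are assumptions made for the example:
```python
def assistant_to_target_tokens(assistant_ids, target_prefix_ids,
                               assistant_tok, target_tok, ctx_window=8):
    """Translate newly drafted assistant tokens into target-tokenizer ids."""
    # 1. Decode the newest assistant tokens, plus a small context window,
    #    back into plain text.
    text = assistant_tok.decode(assistant_ids[-ctx_window:],
                                skip_special_tokens=True)
    # 2. Re-encode that text with the *target* tokenizer.
    reencoded = target_tok.encode(text, add_special_tokens=False)
    # 3. Align against the most recent target tokens to pinpoint where the
    #    newly generated tokens start, then append only the new suffix.
    overlap = 0
    for k in range(min(len(reencoded), len(target_prefix_ids)), 0, -1):
        if target_prefix_ids[-k:] == reencoded[:k]:
            overlap = k
            break
    return target_prefix_ids + reencoded[overlap:]
```
The alignment step is what absorbs the fact that two tokenizers may split the same text differently.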
## Benchmarks
The table below shows the latency improvements observed for target models when paired with assistant models using different tokenizers.
| Target model | Assistant model | Dataset | Task | Speedup |
|----------------------|---------------------|---------------------------|---------------------------|---------------------------|
| `codellama/CodeLlama-13b-Instruct-hf` | `bigcode/tiny_starcoder_py` | [`openai/humaneval`](https://huggingface.co/openai/humaneval) | code generation | **1.90x** |
| [`mistralai/Mixtral-8x22B-Instruct-v0.1`](https://huggingface.co/mistralai/Mixtral-8x22B-Instruct-v0.1) | `double7/vicuna-68m` | [`cnn_dailymail`](https://huggingface.co/cnn_dailymail) | summarization | **1.52x** |
| `google/gemma-2-9b` | `double7/vicuna-68m` | [`cnn_dailymail`](https://huggingface.co/cnn_dailymail) | summarization | **1.76x** |
| `mistralai/Mixtral-8x22B-Instruct-v0.1` | `Qwen/Qwen2-0.5B-Instruct` | [`tau/scrolls`](https://huggingface.co/tau/scrolls) | long-context summarization | **1.78x** |
| `meta-llama/Llama-3.1-70B` | `Qwen/Qwen2-0.5B-Instruct` | [`tau/scrolls`](https://huggingface.co/tau/scrolls) | long-context summarization | **1.78x** |
| `microsoft/Phi-3-medium-128k-instruct` | `Qwen/Qwen2-0.5B-Instruct` | [`tau/scrolls`](https://huggingface.co/tau/scrolls) | long-context summarization | **1.91x** |
Note that the target models above do not have small variants (under 1 billion parameters) which are suitable for acceleration using standard assisted generation.
Each experiment was conducted on 100 randomly selected examples.
Experiments with `Llama` and `Mixtral` target models used 2 and 4 A100 GPUs, respectively. All other experiments ran with a single A6000 GPU.
## Code
Universal assisted generation has been integrated into release [4.46.0](https://github.com/huggingface/transformers/releases/tag/v4.46.0) of 🤗 Transformers.
To use, pass `tokenizer` and `assistant_tokenizer` to `generate()`:
```python
>>> from transformers import AutoModelForCausalLM, AutoTokenizer
>>> prompt = "Alice and Bob"
>>> checkpoint = "google/gemma-2-9b"
>>> assistant_checkpoint = "double7/vicuna-68m"
>>> assistant_tokenizer = AutoTokenizer.from_pretrained(assistant_checkpoint)
>>> tokenizer = AutoTokenizer.from_pretrained(checkpoint)
>>> inputs = tokenizer(prompt, return_tensors="pt")
>>> model = AutoModelForCausalLM.from_pretrained(checkpoint)
>>> assistant_model = AutoModelForCausalLM.from_pretrained(assistant_checkpoint)
>>> outputs = model.generate(**inputs, assistant_model=assistant_model, tokenizer=tokenizer, assistant_tokenizer=assistant_tokenizer)
>>> tokenizer.batch_decode(outputs, skip_special_tokens=True)
['Alice and Bob are sitting in a bar. Alice is drinking a beer and Bob is drinking a']
```
## Future Directions
While passing `do_sample=True` with standard assisted generation uses the speculative sampling algorithm ([Algorithm 1 from the paper](https://arxiv.org/pdf/2211.17192.pdf)), UAG
currently supports multinomial sampling only. In multinomial sampling, if the target model doesn't sample the same token as the assistant, the token is automatically rejected, which is not the case with speculative sampling. In practice, this means that UAG with `do_sample=True` will have a lower throughput compared to the case where the assistant has the same tokenizer. In the future, we plan to add support for speculative sampling with UAG.
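For intuition, speculative sampling (Algorithm 1 in the paper) accepts a drafted token `x` with probability `min(1, p_target(x) / p_assistant(x))` and, on rejection, resamples from the renormalized residual `max(0, p_target - p_assistant)`, so near-misses are not wasted. A toy sketch of that acceptance rule, assuming dense probability vectors over the vocabulary:
```python
import numpy as np

def speculative_accept(token, p_target, p_assistant, rng=None):
    """Accept or resample one drafted token (toy version of Algorithm 1)."""
    rng = rng or np.random.default_rng()
    # Accept the assistant's token with probability min(1, p_t(x) / p_a(x)).
    if rng.random() < min(1.0, p_target[token] / p_assistant[token]):
        return token
    # On rejection, sample from the renormalized residual distribution.
    residual = np.clip(p_target - p_assistant, 0.0, None)
    return int(rng.choice(len(residual), p=residual / residual.sum()))
```
Multinomial-sampling UAG, by contrast, simply rejects whenever the two sampled tokens differ.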
In addition, we intend to integrate UAG into 🤗 Transformers pipelines, for a more concise and streamlined usage.
## References
- [Fast Inference from Transformers via Speculative Decoding](https://arxiv.org/pdf/2211.17192)
- [Assisted Generation: a new direction toward low-latency text generation](https://huggingface.co/blog/assisted-generation)
| 7 |
0 | hf_public_repos | hf_public_repos/blog/ml-director-insights-2.md | ---
title: "Director of Machine Learning Insights [Part 2: SaaS Edition]"
thumbnail: /blog/assets/67_ml_director_insights/thumbnail.png
authors:
- user: britneymuller
---
# Director of Machine Learning Insights [Part 2: SaaS Edition]
_If you or your team are interested in building ML solutions faster visit [hf.co/support](https://huggingface.co/support?utm_source=article&utm_medium=blog&utm_campaign=ml_director_insights_2) today!_
👋 Welcome to Part 2 of our Director of Machine Learning Insights [Series]. Check out [Part 1 here.](https://huggingface.co/blog/ml-director-insights)
Directors of Machine Learning have a unique seat at the AI table spanning the perspective of various roles and responsibilities. Their rich knowledge of ML frameworks, engineering, architecture, real-world applications and problem-solving provides deep insights into the current state of ML. For example, one director will note how using new transformers speech technology decreased their team’s error rate by 30% and how simple thinking can help save _a lot_ of computational power.
Ever wonder what directors at Salesforce or ZoomInfo currently think about the state of Machine Learning? What their biggest challenges are? And what they're most excited about? Well, you're about to find out!
In this second SaaS focused installment, you’ll hear from a deep learning for healthcare textbook author who also founded a non-profit for mentoring ML talent, a chess fanatic cybersecurity expert, an entrepreneur whose business was inspired by Barbie’s need to monitor brand reputation after a lead recall, and a seasoned patent and academic paper author who enjoys watching his 4 kids make the same mistakes as his ML models.
🚀 Let’s meet some top Machine Learning Directors in SaaS and hear what they have to say about Machine Learning:
<img class="mx-auto" style="float: left;" padding="5px" width="200" src="/blog/assets/67_ml_director_insights/Omar-Rahman.jpeg"></a>
### [Omar Rahman](https://www.linkedin.com/in/omar-rahman-4739713a/) - Director of Machine Learning at [Salesforce](https://www.salesforce.com/)
**Background:** Omar leads a team of Machine Learning and Data Engineers in leveraging ML for defensive security purposes as part of the Cybersecurity team. Previously, Omar has led data science and machine learning engineering teams at Adobe and SAP focusing on bringing intelligent capabilities to marketing cloud and procurement applications. Omar holds a Master’s degree in Electrical Engineering from Arizona State University.
**Fun Fact:** Omar loves to play chess and volunteers his free time to guide and mentor graduate students in AI.
**Salesforce:** World's #1 customer relationship management software.
#### **1. How has ML made a positive impact on SaaS?**
ML has benefited SaaS offerings in many ways.
**a. Improving automation within applications:** For example, a service ticket router using NLP (Natural Language Processing) to understand the context of the service request and routing it to the appropriate team within the organization.
**b. Reduction in code complexity:** Rules-based systems tend to get unwieldy as new rules are added, thereby increasing maintenance costs. For example, an ML-based language translation system is more accurate and robust with far fewer lines of code compared to previous rules-based systems.
**c. Better forecasting results in cost savings.** Being able to forecast more accurately helps reduce backorders in the supply chain and yields cost savings through reduced storage costs.
#### **2. What are the biggest ML challenges within SaaS?**
a. Productizing ML applications requires a lot more than having a model. Being able to leverage the model for serving results, detecting and adapting to changes in data statistics, etc. creates significant overhead in deploying and maintaining ML systems.
b. In most large organizations, data is often siloed and not well maintained, which means a significant amount of time and effort must be spent consolidating, pre-processing, and cleaning data before ML-based applications can be created.
#### **3. What’s a common mistake you see people make trying to integrate ML into SaaS?**
Not focusing enough on the business context and the problem being solved, and instead trying to use the latest and greatest algorithms and newly open-sourced libraries. A lot can be achieved by simple traditional ML techniques.
#### **4. What excites you most about the future of ML?**
Generalized artificial intelligence capabilities, if built and managed well, have the capability to transform humanity in more ways than one can imagine. My hope is that we will see great progress in the areas of healthcare and transportation. We already see the benefits of AI in radiology resulting in significant savings in manpower thereby enabling humans to focus on more complex tasks. Self-driving cars and trucks are already transforming the transportation sector.
<img class="mx-auto" style="float: left;" padding="5px" width="200" src="/blog/assets/67_ml_director_insights/Cao-Danica-Xiao.jpeg"></a>
### [Cao (Danica) Xiao](https://www.linkedin.com/in/caoxiao/) - Senior Director of Machine Learning at [Amplitude](https://amplitude.com/)
**Background:** Cao (Danica) Xiao is the Senior Director and Head of Data Science and Machine Learning at Amplitude. Her team focuses on developing and deploying self-serving machine learning models and products based on multi-sourced user data to solve critical business challenges regarding digital product analytics and optimization. In addition, she is a passionate machine learning researcher with over 95 papers published in leading CS venues. She is also a technology leader with extensive experience in machine learning roadmap creation, team building, and mentoring.
Prior to Amplitude, Cao (Danica) was the Global Head of Machine Learning in the Analytics Center of Excellence of IQVIA. Before that, she was a research staff member at IBM Research and research lead at MIT-IBM Watson AI Lab. She got her Ph.D. degree in machine learning from the University of Washington, Seattle. Recently, she also co-authored a textbook on deep learning for healthcare and founded a non-profit organization for mentoring machine learning talent.
**Fun Fact:** Cao is a cat-lover and is a mom to two cats: one Singapura girl and one British shorthair boy.
**Amplitude:** A cloud-based product-analytics platform that helps customers build better products.
#### **1. How has ML made a positive impact on SaaS?**
ML plays a game-changing role in turning massive noisy machine-generated or user-generated data into answers to all kinds of business questions including personalization, prediction, recommendation, etc. It impacts a wide spectrum of industry verticals via SaaS.
#### **2. What are the biggest ML challenges within SaaS?**
Lack of data for ML model training that covers a broader range of industry use cases. While aiming to be a general solution for all industry verticals, we still need to figure out how to handle the vertical-specific needs arising from the business, as well as the domain-shift issues that affect ML model quality.
#### **3. What’s a common mistake you see people make trying to integrate ML into a SaaS product?**
Not giving users the flexibility to incorporate their business knowledge or other human factors that are critical to business success. For example, for a self-serve product recommendation, it would be great if users could control the diversity of recommended products.
#### **4. What excites you most about the future of ML?**
ML has seen tremendous success. It also evolves rapidly to address the current limitations (e.g., lack of data, domain shift, incorporation of domain knowledge).
More ML technologies will be applied to solve business or customer needs. For example, interpretable ML for users to understand and trust the ML model outputs; counterfactual prediction for users to estimate the alternative outcome should they make a different business decision.
<img class="mx-auto" style="float: left;" padding="5px" width="200" src="/blog/assets/67_ml_director_insights/Raphael-Cohen.jpeg"></a>
### [Raphael Cohen](https://www.linkedin.com/in/raphael-cohen-63a87779/) - Director of the Machine Learning at [ZoomInfo](https://www.zoominfo.com/)
**Background:** Raphael has a Ph.D. in the field of understanding health records and genetics, has authored 20 academic papers and has 8 patents. Raphael is also a leader in Data Science and Research with a background in NLP, Speech, healthcare, sales, customer journeys, and IT.
**Fun Fact:** Raphael has 4 kids and enjoys seeing them learn and make the same mistakes as some of his ML models.
**ZoomInfo:** Intelligent sales and marketing technology backed by the world's most comprehensive business database.
#### **1. How has ML made a positive impact on SaaS?**
Machine Learning has facilitated the transcription of conversational data to help people unlock new insights and understandings. People can now easily view the things they talked about, summarized goals, takeaways, who spoke the most, who asked the best questions, what the next steps are, and more. This is incredibly useful for many interactions like email and video conferencing (which are more common now than ever).
With [Chorus.ai](https://chorus.ai) we transcribe conversations as they are being recorded in real time. We use an algorithm called [Wav2Vec](https://arxiv.org/abs/1904.05862) to do this. 🤗 [Hugging Face recently released their own Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/wav2vec2) version, created for training, which we derived a lot of value from. This new generation of transformer speech technology is incredibly powerful; it has decreased our error rate by 30%.
Once we transcribe a conversation we can look into the content - this is where NLP comes in, and we rely heavily on [Hugging Face Transformers](https://huggingface.co/docs/transformers/index) to detect around 20 categories of topics inside recordings and emails; for example, are we talking about pricing, signing a contract, or next steps? All of these topics are discussed or sent through email, and it’s now easy to extract that info without having to go back through all of your conversations.
This helps make people much better at their jobs.
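As a toy illustration of that topic-tagging step (the model and labels below are illustrative, not Chorus's actual proprietary system), a zero-shot classifier can assign topics to transcript snippets:
```python
from transformers import pipeline

# Illustrative zero-shot topic tagger; a production system would likely use
# dedicated, fine-tuned models instead.
classifier = pipeline("zero-shot-classification",
                      model="facebook/bart-large-mnli")
labels = ["pricing", "signing a contract", "next steps"]
result = classifier("Let's review the pricing before we sign.",
                    candidate_labels=labels)
print(result["labels"][0], round(result["scores"][0], 3))
```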
#### **2. What are the biggest ML challenges within SaaS?**
The biggest challenge is understanding when to make use of ML.
What problems can we solve with ML and which shouldn’t we? A lot of times we have a breakthrough with an ML model but a computationally lighter heuristic model is better suited to solve the problem we have.
This is where a strong AI strategy comes into play: understand how you want your final product to work and at what efficiency.
We also face the question of how to get the ML models you’ve built into production with a low environmental/computational footprint. Everyone is struggling with this: how do you keep models in production in an efficient way without burning too many resources?
A great example of this was when we moved to the Wav2Vec framework, which required us to break down our conversational audio into 15sec segments that get fed into this huge model. During this, we discovered that we were feeding the model a lot of segments that were pure silence. This is common when someone doesn’t show up or one person is waiting for another to join a meeting.
Just by adding another very light model to tell us when not to send the silent segments into this big complicated ML model, we are able to save a lot of computational power/energy. This is an example of where engineers can think of other easier ways to speed up and save on model production. There’s an opportunity for more engineers to be savvier and better optimize models without burning too many resources.
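As a toy illustration of that gating idea (the real system presumably used a trained lightweight model rather than a bare threshold), even a simple energy check can keep silent segments away from the expensive model:
```python
import numpy as np

def is_mostly_silent(segment: np.ndarray, threshold: float = 1e-4) -> bool:
    # Mean energy of the waveform; the threshold is illustrative and would
    # need tuning on real audio.
    return float(np.mean(segment ** 2)) < threshold

def transcribe_segments(segments, heavy_asr_model):
    # Only feed non-silent segments into the large ASR model.
    return [heavy_asr_model(seg) for seg in segments if not is_mostly_silent(seg)]
```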
#### **3. What’s a common mistake you see people make trying to integrate ML into SaaS?**
Is my solution the smartest solution? Is there a better way to break this down and solve it more efficiently?
When we started identifying speakers we went directly with an ML method and this wasn’t as accurate as the video conference provider data.
Since then we learned that the best way to do this is to start with the metadata of who speaks from the conference provider and then overlay that with a smart embedding model. We lost precious time during this learning curve. We wouldn’t have used this large ML solution had we stopped to understand that there were other data sources we could invest in that would help us accelerate more efficiently.
Think outside the box, and don’t just take something someone built and think, “I have an idea of how to make this better.” Where can we be smarter by understanding the problem better?
#### **4. What excites you most about the future of ML?**
I think we are in the middle of another revolution. For us, seeing our error rates drop by 30% with our Wav2Vec model was amazing. We had been working for years getting only 1% drops each time, and then within 3 months we saw such a huge improvement, and we know that’s only the beginning.
In academia, bigger and smarter things are happening. These pre-trained models are allowing us to do things we could never imagine before. This is very exciting!
We are also seeing a lot of tech from NLP entering other domains like speech and vision and being able to power them.
Another thing I’m really excited about is generative models! We recently worked with a company called [Bria.ai](https://bria.ai/) and they use these amazing GANs to create images. So you take a stock photo and you can turn it into a different photo by saying “remove glasses”, “add glasses” or “add hair” and it does so perfectly. The idea is that we can use this to generate data. We can take images of people from meetings not smiling and we can make them smile in order to build a data set for smile detection. This will be transformative. You can take 1 image and turn it into 100 images. This will also apply to speech generation which could be a powerful application within the service industry.
#### **Any final thoughts?**
It’s challenging to put models into production. I believe data science teams need engineering embedded with them. Engineers should be part of the AI team. This will be an important structural pivot in the future.
<img class="mx-auto" style="float: left;" padding="5px" width="200" src="/blog/assets/67_ml_director_insights/Martin-Ostrovsky.jpeg"></a>
### [Martin Ostrovsky](https://www.linkedin.com/in/martinostrovsky/) Founder/CEO & Machine Learning Director at [Repustate Inc.](https://www.repustate.com/)
**Background:** Martin is passionate about AI, ML, and NLP and is responsible for guiding the strategy and success of all Repustate products by leading the cross-functional team responsible for developing and improving them. He sets the strategy, roadmap, and feature definition for Repustate’s Global Text Analytics API, Sentiment Analysis, Deep Search, and Named Entity Recognition solutions. He has a Bachelor's degree in Computer Science from York University and earned his Master of Business Administration from the Schulich School of Business.
**Fun Fact:** The first application of ML I used was for Barbie toys. My professor at Schulich Business School mentioned that Barbie needed to monitor their brand reputation due to a recall of the toys over concerns of excessive lead in them. Hiring people to manually go through each social post and online article seemed just so inefficient and ineffective to me. So I proposed to create a machine learning algorithm that would monitor what people thought of them from across all social media and online channels. The algorithm worked seamlessly. And that’s how I decided to name my company, Repustate - the “state” of your “repu”tation. 🤖
**Repustate:** A leading provider of text analytics services for enterprise companies.
#### **1. Favorite ML business application?**
My favorite ML application is cybersecurity.
Cybersecurity remains the most critical part for any company (government or non-government) with regard to data. Machine Learning helps identify cyber threats and fight cyber-crime, including cyberbullying, and allows for a faster response to security breaches. ML algorithms quickly analyze the most likely vulnerabilities and potential malware and spyware applications based on user data. They can spot distortion in endpoint entry patterns and identify it as a potential data breach.
#### **2. What is your biggest ML challenge?**
The biggest ML challenge is audio-to-text transcription in the Arabic language. There are quite a few systems that can decipher Arabic, but they lack accuracy. Arabic is the official language of 26 countries and has 247 million native speakers and 29 million non-native speakers. It is a complex language with a rich vocabulary and many dialects.
The sentiment mining tool needs to read data directly in Arabic if you want accurate insights from Arabic text, because otherwise nuances are lost in translation. Translating text to English or any other language can completely change the meaning of words in Arabic, including even the root word. That’s why the algorithm needs to be trained on Arabic datasets and use a dedicated Arabic part-of-speech tagger. Because of these challenges, most companies fail to provide accurate Arabic audio-to-text transcription to date.
#### **3. What’s a common mistake you see people make trying to integrate ML?**
The most common mistake that companies make while trying to integrate ML is insufficient data in their training datasets. Most ML models cannot distinguish between good data and insufficient data. Therefore, training datasets are considered relevant and used as a precedent to determine the results in most cases. This challenge isn’t limited to small- or medium-sized businesses; large enterprises have the same challenge.
No matter what the ML processes are, companies need to ensure that the training datasets are reliable and exhaustive for their desired outcome by incorporating a human element into the early stages of machine learning.
However, companies can create the required foundation for successful machine learning projects with a thorough review of accurate, comprehensive, and constant training data.
#### **4. Where do you see ML having the biggest impact in the next 5-10 years?**
In the next 5-10 years, ML will have the biggest impact on transforming the healthcare sector.
**Networked hospitals and connected care:**
With predictive care, command centers are all set to analyze clinical and location data to monitor supply and demand across healthcare networks in real-time. With ML, healthcare professionals will be able to spot high-risk patients more quickly and efficiently, thus removing bottlenecks in the system. You can check the spread of communicable diseases faster, take better measures to manage epidemics, identify at-risk patients more accurately, especially for genetic diseases, and more.
**Better staff and patient experiences:**
Predictive healthcare networks are expected to reduce wait times, improve staff workflows, and take on the ever-growing administrative burden. By learning from every patient, diagnosis, and procedure, ML is expected to create experiences that adapt to hospital staff as well as the patient. This improves health outcomes and reduces clinician shortages and burnout while enabling the system to be financially sustainable.
---
🤗 Thank you for joining us in this second installment of ML Director Insights. Stay tuned for more insights from ML Directors in Finance, Healthcare and e-Commerce.
Big thanks to Omar Rahman, Cao (Danica) Xiao, Raphael Cohen, and Martin Ostrovsky for their brilliant insights and participation in this piece. We look forward to watching each of your continued successes and will be cheering you on each step of the way. 🎉
If you or your team are interested in accelerating your ML roadmap with Hugging Face Experts please visit [hf.co/support](https://huggingface.co/support?utm_source=article&utm_medium=blog&utm_campaign=ml_director_insights_2) to learn more.
| 8 |
0 | hf_public_repos | hf_public_repos/blog/amd_pervasive_developer_ai_contest.md | ---
title: "AMD Pervasive AI Developer Contest!"
thumbnail: assets/amd_pervasive_developer_ai_contest/amd_developer_general_abstract.jpg
authors:
- user: guruprasadmp
guest: true
---
# AMD Pervasive AI Developer Contest
AMD and Hugging Face are actively engaged in helping developers seamlessly deploy cutting-edge AI models on AMD hardware.
This year, AMD takes their commitment one step further by providing developers free, hands-on access to state-of-the-art AMD hardware through their recently announced [Pervasive AI Developer Contest](https://www.hackster.io/contests/amd2023#challengeNav).
This global competition is an incubator of AI innovation, beckoning developers worldwide to create unique AI applications.
Developers can choose from three exciting categories: Generative AI, Robotics AI, and PC AI, each offering cash prizes of up to $10,000 USD for winners, with a total of $160,000 USD being given away.
700 AMD platforms are up for grabs to eligible participants.
Don’t miss your chance to receive an AMD Radeon™ PRO W7900, AMD Kria™ KR260 Robotics Starter Kit, Ryzen™ AI-powered PC, or cloud access to an AMD Instinct™ MI210 accelerator card.
## AMD + Hugging Face Collaboration
For those focusing on large language model development, Hugging Face and AMD have made significant strides to provide out-of-the-box support on AMD GPUs.
Our combined efforts include the ability to run HF transformer models without the need for code modifications, allowing for seamless operation.
On top of native support, additional acceleration tools like ONNX model execution on ROCm-powered GPUs, Optimum-Benchmark, DeepSpeed for ROCm-powered GPUs using Transformers, GPTQ, TGI and more are supported.
Additionally, for those applying for the PC AI contest category to develop on AMD Ryzen AI Powered PCs, we are continuously growing our pre-trained model zoo to support a wide variety of models enabling developers to get started in building AI applications swiftly.
## Sign Up Today
We invite you to be innovative, and to contribute to shaping what AI can achieve and we at Hugging Face look forward to the new solutions this contest will bring to light.
To participate, please register [here](https://www.hackster.io/contests/amd2023#challengeNav).
| 9 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/image_to_image.py | from typing import TYPE_CHECKING, Optional
from app.pipelines import Pipeline
if TYPE_CHECKING:
from PIL import Image
class ImageToImagePipeline(Pipeline):
def __init__(self, model_id: str):
# IMPLEMENT_THIS
# Preload all the elements you are going to need for inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
raise NotImplementedError(
"Please implement ImageToImagePipeline.__init__ function"
)
def __call__(self, image: "Image.Image", prompt: Optional[str] = "") -> "Image.Image":
"""
Args:
image (:obj:`PIL.Image.Image`):
a condition image
prompt (:obj:`str`, *optional*):
a string containing some text
Return:
A :obj:`PIL.Image` with the raw image representation as PIL.
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement ImageToImagePipeline.__call__ function"
)
| 0 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/audio_to_audio.py | from typing import List, Tuple
import numpy as np
from app.pipelines import Pipeline
class AudioToAudioPipeline(Pipeline):
def __init__(self, model_id: str):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
# IMPLEMENT_THIS : Please define a `self.sampling_rate` for this pipeline
# to automatically read the input correctly
self.sampling_rate = 16000
raise NotImplementedError(
"Please implement AudioToAudioPipeline __init__ function"
)
def __call__(self, inputs: np.array) -> Tuple[np.array, int, List[str]]:
"""
Args:
inputs (:obj:`np.array`):
The raw waveform of audio received. By default sampled at `self.sampling_rate`.
The shape of this array is `T`, where `T` is the time axis
Return:
A :obj:`tuple` containing:
- :obj:`np.array`:
The return shape of the array must be `C'`x`T'`
- a :obj:`int`: the sampling rate as an int in Hz.
- a :obj:`List[str]`: the annotation for each out channel.
This can be the name of the instruments for audio source separation
or some annotation for speech enhancement. The length must be `C'`.
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement AudioToAudioPipeline __call__ function"
)
| 1 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/text_classification.py | from typing import Dict, List
from app.pipelines import Pipeline
class TextClassificationPipeline(Pipeline):
def __init__(
self,
model_id: str,
):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
raise NotImplementedError(
"Please implement TextClassificationPipeline __init__ function"
)
def __call__(self, inputs: str) -> List[Dict[str, float]]:
"""
Args:
inputs (:obj:`str`):
a string containing some text
Return:
A :obj:`list`:. The object returned should be a list of one list like [[{"label": "POSITIVE", "score": 0.9939950108528137}]] containing:
- "label": A string representing what the label/class is. There can be multiple labels.
- "score": A score between 0 and 1 describing how confident the model is for this label/class.
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement TextClassificationPipeline __call__ function"
)
| 2 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/speech_segmentation.py | from typing import Dict
import numpy as np
from app.pipelines import Pipeline
class SpeechSegmentationPipeline(Pipeline):
def __init__(self, model_id: str):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
# IMPLEMENT_THIS : Please define a `self.sampling_rate` for this pipeline
# to automatically read the input correctly
self.sampling_rate = 16000
raise NotImplementedError(
"Please implement SpeechSegmentationPipeline __init__ function"
)
def __call__(self, inputs: np.array) -> List[Dict[str, Any]]:
"""
Args:
inputs (:obj:`np.array`):
The raw waveform of audio received, sampled at `self.sampling_rate` if set, otherwise 16 kHz.
Return:
A :obj:`list`:. Each item in the list is like {"class": "XXX", "start": float, "end": float}
"class" is the associated class of the audio segment, "start" and "end" are markers expressed in seconds
within the audio file.
"""
# IMPLEMENT_THIS
# api_inference_community.normalizers.speaker_diarization_normalize could help.
raise NotImplementedError(
"Please implement SpeechSegmentationPipeline __call__ function"
)
| 3 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/summarization.py | from typing import Dict, List
from app.pipelines import Pipeline
class SummarizationPipeline(Pipeline):
def __init__(self, model_id: str):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
raise NotImplementedError(
"Please implement SummarizationPipeline __init__ function"
)
def __call__(self, inputs: str) -> List[Dict[str, str]]:
"""
Args:
inputs (:obj:`str`): a string to be summarized
Return:
A :obj:`list` of :obj:`dict` in the form of {"summary_text": "The string after summarization"}
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement SummarizationPipeline __init__ function"
)
| 4 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/automatic_speech_recognition.py | from typing import Dict
import numpy as np
from app.pipelines import Pipeline
class AutomaticSpeechRecognitionPipeline(Pipeline):
def __init__(self, model_id: str):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
# IMPLEMENT_THIS : Please define a `self.sampling_rate` for this pipeline
# to automatically read the input correctly
self.sampling_rate = 16000
raise NotImplementedError(
"Please implement AutomaticSpeechRecognitionPipeline __init__ function"
)
def __call__(self, inputs: np.array) -> Dict[str, str]:
"""
Args:
inputs (:obj:`np.array`):
The raw waveform of audio received, sampled at `self.sampling_rate` if set, otherwise 16 kHz.
Return:
A :obj:`dict`:. The object returned should be like {"text": "XXX"} containing
the detected text from the input audio
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement AutomaticSpeechRecognitionPipeline __call__ function"
)
| 5 |
0 | hf_public_repos/api-inference-community/docker_images/common/app | hf_public_repos/api-inference-community/docker_images/common/app/pipelines/token_classification.py | from typing import Any, Dict, List
from app.pipelines import Pipeline
class TokenClassificationPipeline(Pipeline):
def __init__(
self,
model_id: str,
):
# IMPLEMENT_THIS
# Preload all the elements you are going to need at inference.
# For instance your model, processors, tokenizer that might be needed.
# This function is only called once, so do all the heavy processing I/O here
raise NotImplementedError(
"Please implement TokenClassificationPipeline __init__ function"
)
def __call__(self, inputs: str) -> List[Dict[str, Any]]:
"""
Args:
inputs (:obj:`str`):
a string containing some text
Return:
A :obj:`list`:. The object returned should be like [{"entity_group": "XXX", "word": "some word", "start": 3, "end": 6, "score": 0.82}] containing:
- "entity_group": A string representing what the entity is.
- "word": A substring of the original string that was detected as an entity.
- "start": the starting offset within `input` of the detected word. input[start:end] == word
- "end": the ending offset within `input` of the detected word. input[start:end] == word
- "score": A score between 0 and 1 describing how confident the model is for this entity.
"""
# IMPLEMENT_THIS
raise NotImplementedError(
"Please implement TokenClassificationPipeline __call__ function"
)
| 6 |
0 | hf_public_repos/api-inference-community/docker_images/common | hf_public_repos/api-inference-community/docker_images/common/tests/test_docker_build.py | import os
import subprocess
from unittest import TestCase
class cd:
"""Context manager for changing the current working directory"""
def __init__(self, newPath):
self.newPath = os.path.expanduser(newPath)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, etype, value, traceback):
os.chdir(self.savedPath)
class DockerBuildTestCase(TestCase):
def test_can_build_docker_image(self):
with cd(os.path.dirname(os.path.dirname(__file__))):
subprocess.check_output(["docker", "build", "."])
| 7 |
0 | hf_public_repos/api-inference-community/docker_images/common | hf_public_repos/api-inference-community/docker_images/common/tests/test_api_speech_segmentation.py | import json
import os
from unittest import TestCase, skipIf
from app.main import ALLOWED_TASKS
from starlette.testclient import TestClient
from tests.test_api import TESTABLE_MODELS
@skipIf(
"speech-segmentation" not in ALLOWED_TASKS,
"speech-segmentation not implemented",
)
class SpeechSegmentationTestCase(TestCase):
def setUp(self):
model_id = TESTABLE_MODELS["speech-segmentation"]
self.old_model_id = os.getenv("MODEL_ID")
self.old_task = os.getenv("TASK")
os.environ["MODEL_ID"] = model_id
os.environ["TASK"] = "speech-segmentation"
from app.main import app
self.app = app
@classmethod
def setUpClass(cls):
from app.main import get_pipeline
get_pipeline.cache_clear()
def tearDown(self):
if self.old_model_id is not None:
os.environ["MODEL_ID"] = self.old_model_id
else:
del os.environ["MODEL_ID"]
if self.old_task is not None:
os.environ["TASK"] = self.old_task
else:
del os.environ["TASK"]
def read(self, filename: str) -> bytes:
dirname = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(dirname, "samples", filename)
with open(filename, "rb") as f:
bpayload = f.read()
return bpayload
def test_original_audiofile(self):
bpayload = self.read("sample1.flac")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
content = json.loads(response.content)
self.assertIsInstance(content, list)
for c in content:
self.assertEqual(set(c.keys()), {"class", "start", "end"})
self.assertIsInstance(c["class"], str)
self.assertIsInstance(c["start"], float)
self.assertIsInstance(c["end"], float)
def test_malformed_audio(self):
bpayload = self.read("malformed.flac")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
400,
)
self.assertEqual(response.content, b'{"error":"Malformed soundfile"}')
def test_dual_channel_audiofile(self):
bpayload = self.read("sample1_dual.ogg")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
content = json.loads(response.content)
self.assertIsInstance(content, list)
for c in content:
self.assertEqual(set(c.keys()), {"class", "start", "end"})
self.assertIsInstance(c["class"], str)
self.assertIsInstance(c["start"], float)
self.assertIsInstance(c["end"], float)
def test_webm_audiofile(self):
bpayload = self.read("sample1.webm")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
content = json.loads(response.content)
self.assertIsInstance(content, list)
for c in content:
self.assertEqual(set(c.keys()), {"class", "start", "end"})
self.assertIsInstance(c["class"], str)
self.assertIsInstance(c["start"], float)
self.assertIsInstance(c["end"], float)
| 8 |
0 | hf_public_repos/api-inference-community/docker_images/common | hf_public_repos/api-inference-community/docker_images/common/tests/test_api_audio_to_audio.py | import base64
import json
import os
from unittest import TestCase, skipIf
from api_inference_community.validation import ffmpeg_read
from app.main import ALLOWED_TASKS
from starlette.testclient import TestClient
from tests.test_api import TESTABLE_MODELS
@skipIf(
"audio-to-audio" not in ALLOWED_TASKS,
"audio-to-audio not implemented",
)
class AudioToAudioTestCase(TestCase):
def setUp(self):
model_id = TESTABLE_MODELS["audio-to-audio"]
self.old_model_id = os.getenv("MODEL_ID")
self.old_task = os.getenv("TASK")
os.environ["MODEL_ID"] = model_id
os.environ["TASK"] = "audio-to-audio"
from app.main import app
self.app = app
@classmethod
def setUpClass(cls):
from app.main import get_pipeline
get_pipeline.cache_clear()
def tearDown(self):
if self.old_model_id is not None:
os.environ["MODEL_ID"] = self.old_model_id
else:
del os.environ["MODEL_ID"]
if self.old_task is not None:
os.environ["TASK"] = self.old_task
else:
del os.environ["TASK"]
def read(self, filename: str) -> bytes:
dirname = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(dirname, "samples", filename)
with open(filename, "rb") as f:
bpayload = f.read()
return bpayload
def test_simple(self):
bpayload = self.read("sample1.flac")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
self.assertEqual(response.headers["content-type"], "application/json")
audio = json.loads(response.content)
self.assertTrue(isinstance(audio, list))
self.assertEqual(set(audio[0].keys()), {"blob", "content-type", "label"})
data = base64.b64decode(audio[0]["blob"])
wavform = ffmpeg_read(data, 16000)
self.assertGreater(wavform.shape[0], 1000)
self.assertTrue(isinstance(audio[0]["content-type"], str))
self.assertTrue(isinstance(audio[0]["label"], str))
def test_malformed_audio(self):
bpayload = self.read("malformed.flac")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
400,
)
self.assertEqual(response.content, b'{"error":"Malformed soundfile"}')
def test_dual_channel_audiofile(self):
bpayload = self.read("sample1_dual.ogg")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
self.assertEqual(response.headers["content-type"], "application/json")
audio = json.loads(response.content)
self.assertTrue(isinstance(audio, list))
self.assertEqual(set(audio[0].keys()), {"blob", "content-type", "label"})
data = base64.b64decode(audio[0]["blob"])
wavform = ffmpeg_read(data, 16000)
self.assertGreater(wavform.shape[0], 1000)
self.assertTrue(isinstance(audio[0]["content-type"], str))
self.assertTrue(isinstance(audio[0]["label"], str))
def test_webm_audiofile(self):
bpayload = self.read("sample1.webm")
with TestClient(self.app) as client:
response = client.post("/", data=bpayload)
self.assertEqual(
response.status_code,
200,
)
self.assertEqual(response.headers["content-type"], "application/json")
audio = json.loads(response.content)
self.assertTrue(isinstance(audio, list))
self.assertEqual(set(audio[0].keys()), {"blob", "content-type", "label"})
data = base64.b64decode(audio[0]["blob"])
wavform = ffmpeg_read(data, 16000)
self.assertGreater(wavform.shape[0], 1000)
self.assertTrue(isinstance(audio[0]["content-type"], str))
self.assertTrue(isinstance(audio[0]["label"], str))
| 9 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/openmp/openmp_pytorch_latencies.svg | [SVG chart asset: OpenMP PyTorch latencies figure; truncated raw vector markup omitted]
x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip144"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip145"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip146"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip147"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip148"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip149"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip150"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip151"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip152"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip153"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip154"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip155"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip156"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip157"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip158"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip159"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip160"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip161"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip162"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip163"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip164"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip165"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip166"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip167"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip168"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip169"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip170"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip171"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip172"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip173"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip174"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip175"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip176"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip177"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip178"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip179"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip180"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip181"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip182"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip183"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip184"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath 
id="clip185"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip186"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip187"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip188"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip189"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip190"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip191"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip192"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip193"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip194"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip195"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip196"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip197"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip198"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip199"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip200"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip201"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip202"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip203"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip204"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><linearGradient x1="4444" y1="3197" x2="4444" y2="3220" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill205"><stop offset="0" stop-color="#6083CB"/><stop offset="0.5" stop-color="#3E70CA"/><stop offset="1" stop-color="#2E61BA"/></linearGradient><clipPath id="clip206"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><clipPath id="clip207"><rect x="1855" y="122" width="5379" height="3152"/></clipPath><linearGradient x1="4591" y1="3197" x2="4591" y2="3220" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill208"><stop offset="0" stop-color="#F18C55"/><stop offset="0.5" stop-color="#F67B28"/><stop offset="1" stop-color="#E56B17"/></linearGradient><clipPath id="clip209"><rect x="1855" y="122" width="5379" height="3152"/></clipPath></defs><g clip-path="url(#clip0)" transform="translate(-1855 -121)"><rect x="1856" y="122" width="5379" height="3153" fill="#FFFFFF"/><g clip-path="url(#clip1)"><path d="M2099.5 2556.58 7184.5 2556.58M2099.5 2296.58 7184.5 2296.58M2099.5 2037.57 7184.5 2037.57M2099.5 1778.56 7184.5 1778.56M2099.5 1519.55 7184.5 1519.55M2099.5 1260.54 7184.5 1260.54M2099.5 1001.53 7184.5 1001.53M2099.5 741.525 7184.5 741.525M2099.5 482.5 7184.5 482.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip2)"><rect x="2131" y="2637" width="62.0002" height="178" fill="url(#fill3)"/></g><g clip-path="url(#clip4)"><rect x="2334" y="2769" width="63" height="46" fill="url(#fill5)"/></g><g clip-path="url(#clip6)"><rect x="2537" y="2795" width="63.0002" height="20" fill="url(#fill7)"/></g><g clip-path="url(#clip8)"><rect x="2741" y="2801" width="63" height="14" fill="url(#fill9)"/></g><g clip-path="url(#clip10)"><rect x="2944" y="2800" width="63.0002" height="15.0002" fill="url(#fill11)"/></g><g clip-path="url(#clip12)"><rect 
x="3148" y="2620" width="61.9998" height="195" fill="url(#fill13)"/></g><g clip-path="url(#clip14)"><rect x="3351" y="2764" width="63.0002" height="50.9998" fill="url(#fill15)"/></g><g clip-path="url(#clip16)"><rect x="3554" y="2793" width="63" height="22" fill="url(#fill17)"/></g><g clip-path="url(#clip18)"><rect x="3758" y="2800" width="63.0002" height="15.0002" fill="url(#fill19)"/></g><g clip-path="url(#clip20)"><rect x="3961" y="2798" width="63" height="16.9998" fill="url(#fill21)"/></g><g clip-path="url(#clip22)"><rect x="4165" y="2431" width="63.0005" height="384" fill="url(#fill23)"/></g><g clip-path="url(#clip24)"><rect x="4368" y="2714" width="63" height="101" fill="url(#fill25)"/></g><g clip-path="url(#clip26)"><rect x="4572" y="2776" width="62" height="38.9998" fill="url(#fill27)"/></g><g clip-path="url(#clip28)"><rect x="4775" y="2792" width="63" height="23" fill="url(#fill29)"/></g><g clip-path="url(#clip30)"><rect x="4978" y="2788" width="63.0005" height="26.9998" fill="url(#fill31)"/></g><g clip-path="url(#clip32)"><rect x="5182" y="1502" width="63" height="1313" fill="url(#fill33)"/></g><g clip-path="url(#clip34)"><rect x="5385" y="2440" width="63.0005" height="375" fill="url(#fill35)"/></g><g clip-path="url(#clip36)"><rect x="5589" y="2705" width="62" height="110" fill="url(#fill37)"/></g><g clip-path="url(#clip38)"><rect x="5792" y="2744" width="63.0005" height="70.9998" fill="url(#fill39)"/></g><g clip-path="url(#clip40)"><rect x="5995" y="2752" width="63" height="63" fill="url(#fill41)"/></g><g clip-path="url(#clip42)"><rect x="6199" y="732" width="63.0005" height="2083" fill="url(#fill43)"/></g><g clip-path="url(#clip44)"><rect x="6402" y="2308" width="63" height="507" fill="url(#fill45)"/></g><g clip-path="url(#clip46)"><rect x="6606" y="2629" width="62" height="186" fill="url(#fill47)"/></g><g clip-path="url(#clip48)"><rect x="6809" y="2712" width="63" height="103" fill="url(#fill49)"/></g><g clip-path="url(#clip50)"><rect x="7012" y="2744" width="63" height="70.9998" fill="url(#fill51)"/></g><g clip-path="url(#clip52)"><rect x="2208" y="2638" width="63.0002" height="177" fill="url(#fill53)"/></g><g clip-path="url(#clip54)"><rect x="2412" y="2769" width="63" height="46" fill="url(#fill55)"/></g><g clip-path="url(#clip56)"><rect x="2615" y="2793" width="63.0002" height="22" fill="url(#fill57)"/></g><g clip-path="url(#clip58)"><rect x="2819" y="2801" width="61.9998" height="14" fill="url(#fill59)"/></g><g clip-path="url(#clip60)"><rect x="3022" y="2797" width="63.0002" height="17.9998" fill="url(#fill61)"/></g><g clip-path="url(#clip62)"><rect x="3226" y="2621" width="61.9998" height="194" fill="url(#fill63)"/></g><g clip-path="url(#clip64)"><rect x="3429" y="2765" width="63.0002" height="49.9998" fill="url(#fill65)"/></g><g clip-path="url(#clip66)"><rect x="3632" y="2791" width="63" height="24" fill="url(#fill67)"/></g><g clip-path="url(#clip68)"><rect x="3836" y="2800" width="63.0002" height="15.0002" fill="url(#fill69)"/></g><g clip-path="url(#clip70)"><rect x="4039" y="2795" width="63" height="20" fill="url(#fill71)"/></g><g clip-path="url(#clip72)"><rect x="4243" y="2431" width="62" height="384" fill="url(#fill73)"/></g><g clip-path="url(#clip74)"><rect x="4446" y="2715" width="63" height="100" fill="url(#fill75)"/></g><g clip-path="url(#clip76)"><rect x="4649" y="2772" width="63.0005" height="43" fill="url(#fill77)"/></g><g clip-path="url(#clip78)"><rect x="4853" y="2791" width="63" height="24" fill="url(#fill79)"/></g><g clip-path="url(#clip80)"><rect x="5056" 
y="2782" width="63.0005" height="33" fill="url(#fill81)"/></g><g clip-path="url(#clip82)"><rect x="5260" y="1511" width="62" height="1304" fill="url(#fill83)"/></g><g clip-path="url(#clip84)"><rect x="5463" y="2450" width="63.0005" height="365" fill="url(#fill85)"/></g><g clip-path="url(#clip86)"><rect x="5666" y="2693" width="63" height="122" fill="url(#fill87)"/></g><g clip-path="url(#clip88)"><rect x="5870" y="2743" width="63.0005" height="71.9998" fill="url(#fill89)"/></g><g clip-path="url(#clip90)"><rect x="6073" y="2750" width="63" height="65" fill="url(#fill91)"/></g><g clip-path="url(#clip92)"><rect x="6277" y="712" width="62" height="2103" fill="url(#fill93)"/></g><g clip-path="url(#clip94)"><rect x="6480" y="2312" width="63" height="503" fill="url(#fill95)"/></g><g clip-path="url(#clip96)"><rect x="6683" y="2652" width="63.0005" height="163" fill="url(#fill97)"/></g><g clip-path="url(#clip98)"><rect x="6887" y="2708" width="63" height="107" fill="url(#fill99)"/></g><g clip-path="url(#clip100)"><rect x="7090" y="2716" width="63.0005" height="99" fill="url(#fill101)"/></g><g clip-path="url(#clip102)"><path d="M2099.5 2815.5 7184.5 2815.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none" fill-rule="evenodd"/></g><g clip-path="url(#clip103)"><path d="M2099.5 2815.5 2099.5 2895.5M3116.6 2815.5 3116.6 2895.5M4133.64 2815.5 4133.64 2895.5M5150.67 2815.5 5150.67 2895.5M6167.7 2815.5 6167.7 2895.5M7184.5 2815.5 7184.5 2895.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip104)"><path d="M2099.5 2895.5 2099.5 2975.5M3116.6 2895.5 3116.6 2975.5M4133.64 2895.5 4133.64 2975.5M5150.67 2895.5 5150.67 2975.5M6167.7 2895.5 6167.7 2975.5M7184.5 2895.5 7184.5 2975.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip105)"><path d="M2099.5 2975.5 2099.5 3055.5M7184.5 2975.5 7184.5 3055.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip106)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2130.67 2605)">138</text></g><g clip-path="url(#clip107)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2344.53 2737)">36</text></g><g clip-path="url(#clip108)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2547.94 2763)">16</text></g><g clip-path="url(#clip109)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2751.35 2769)">11</text></g><g clip-path="url(#clip110)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2954.76 2768)">12</text></g><g clip-path="url(#clip111)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3147.72 2588)">151</text></g><g clip-path="url(#clip112)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3361.58 2732)">40</text></g><g clip-path="url(#clip113)"><text fill="#44546A" 
font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3564.98 2761)">18</text></g><g clip-path="url(#clip114)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3768.39 2768)">12</text></g><g clip-path="url(#clip115)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3971.8 2766)">14</text></g><g clip-path="url(#clip116)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4164.76 2399)">296</text></g><g clip-path="url(#clip117)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4378.62 2682)">78</text></g><g clip-path="url(#clip118)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4582.03 2744)">30</text></g><g clip-path="url(#clip119)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4785.44 2760)">18</text></g><g clip-path="url(#clip120)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4988.85 2756)">21</text></g><g clip-path="url(#clip121)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5171.35 1470)">1014</text></g><g clip-path="url(#clip122)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5385.21 2408)">290</text></g><g clip-path="url(#clip123)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5599.07 2673)">85</text></g><g clip-path="url(#clip124)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5802.48 2712)">55</text></g><g clip-path="url(#clip125)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6005.89 2720)">49</text></g><g clip-path="url(#clip126)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6188.4 700)">1608</text></g><g clip-path="url(#clip127)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6402.26 2276)">391</text></g><g clip-path="url(#clip128)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6605.67 2597)">144</text></g><g clip-path="url(#clip129)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6819.53 2680)">80</text></g><g clip-path="url(#clip130)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 7022.93 2712)">55</text></g><g clip-path="url(#clip131)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" 
transform="matrix(1 0 0 1 2208.52 2606)">137</text></g><g clip-path="url(#clip132)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2422.38 2737)">36</text></g><g clip-path="url(#clip133)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2625.79 2761)">17</text></g><g clip-path="url(#clip134)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2829.2 2769)">11</text></g><g clip-path="url(#clip135)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3032.61 2765)">14</text></g><g clip-path="url(#clip136)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3225.56 2589)">150</text></g><g clip-path="url(#clip137)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3439.42 2733)">39</text></g><g clip-path="url(#clip138)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3642.83 2759)">19</text></g><g clip-path="url(#clip139)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3846.24 2768)">12</text></g><g clip-path="url(#clip140)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4049.65 2763)">16</text></g><g clip-path="url(#clip141)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4242.61 2399)">296</text></g><g clip-path="url(#clip142)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4456.47 2683)">77</text></g><g clip-path="url(#clip143)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4659.88 2740)">33</text></g><g clip-path="url(#clip144)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4863.28 2759)">19</text></g><g clip-path="url(#clip145)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5066.69 2750)">25</text></g><g clip-path="url(#clip146)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5249.2 1479)">1006</text></g><g clip-path="url(#clip147)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5463.06 2418)">282</text></g><g clip-path="url(#clip148)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5676.92 2661)">95</text></g><g clip-path="url(#clip149)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5880.33 2711)">56</text></g><g clip-path="url(#clip150)"><text 
fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6083.74 2718)">50</text></g><g clip-path="url(#clip151)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6266.25 681)">1623</text></g><g clip-path="url(#clip152)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6480.11 2280)">388</text></g><g clip-path="url(#clip153)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6683.51 2620)">126</text></g><g clip-path="url(#clip154)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6897.37 2676)">83</text></g><g clip-path="url(#clip155)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 7100.78 2684)">77</text></g><g clip-path="url(#clip156)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2040.08 2827)">0</text></g><g clip-path="url(#clip157)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1998.28 2568)">200</text></g><g clip-path="url(#clip158)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1998.28 2309)">400</text></g><g clip-path="url(#clip159)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1998.28 2049)">600</text></g><g clip-path="url(#clip160)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1998.28 1790)">800</text></g><g clip-path="url(#clip161)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1977.38 1531)">1000</text></g><g clip-path="url(#clip162)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1977.38 1272)">1200</text></g><g clip-path="url(#clip163)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1977.38 1013)">1400</text></g><g clip-path="url(#clip164)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1977.38 753)">1600</text></g><g clip-path="url(#clip165)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1977.38 494)">1800</text></g><g clip-path="url(#clip166)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2190.5 2881)">1</text></g><g clip-path="url(#clip167)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2393.91 2881)">4</text></g><g clip-path="url(#clip168)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" 
font-size="41" transform="matrix(1 0 0 1 2586.86 2881)">16</text></g><g clip-path="url(#clip169)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2790.27 2881)">40</text></g><g clip-path="url(#clip170)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2993.68 2881)">80</text></g><g clip-path="url(#clip171)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3207.54 2881)">1</text></g><g clip-path="url(#clip172)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3410.95 2881)">4</text></g><g clip-path="url(#clip173)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3603.91 2881)">16</text></g><g clip-path="url(#clip174)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3807.32 2881)">40</text></g><g clip-path="url(#clip175)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4010.73 2881)">80</text></g><g clip-path="url(#clip176)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4224.58 2881)">1</text></g><g clip-path="url(#clip177)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4427.99 2881)">4</text></g><g clip-path="url(#clip178)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4620.95 2881)">16</text></g><g clip-path="url(#clip179)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4824.36 2881)">40</text></g><g clip-path="url(#clip180)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5027.77 2881)">80</text></g><g clip-path="url(#clip181)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5241.63 2881)">1</text></g><g clip-path="url(#clip182)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5445.04 2881)">4</text></g><g clip-path="url(#clip183)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5638 2881)">16</text></g><g clip-path="url(#clip184)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5841.41 2881)">40</text></g><g clip-path="url(#clip185)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6044.81 2881)">80</text></g><g clip-path="url(#clip186)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6258.67 2881)">1</text></g><g clip-path="url(#clip187)"><text 
fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6462.08 2881)">4</text></g><g clip-path="url(#clip188)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6655.04 2881)">16</text></g><g clip-path="url(#clip189)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6858.45 2881)">40</text></g><g clip-path="url(#clip190)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 7061.86 2881)">80</text></g><g clip-path="url(#clip191)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2586.86 2961)">20</text></g><g clip-path="url(#clip192)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3603.91 2961)">32</text></g><g clip-path="url(#clip193)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4610.5 2961)">128</text></g><g clip-path="url(#clip194)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5627.55 2961)">384</text></g><g clip-path="url(#clip195)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6644.59 2961)">512</text></g><g clip-path="url(#clip196)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4577.14 3041)">pytorch</text></g><g clip-path="url(#clip197)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(6.12323e-17 -1 1 6.12323e-17 1952.28 1757)">Latency (ms)</text></g><g clip-path="url(#clip198)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 4500.96 3101)">Sequence Length</text></g><g clip-path="url(#clip199)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 4038.87 227)">Intel Ice lake Xeon 8380 </text></g><g clip-path="url(#clip200)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 4774.22 227)">-</text></g><g clip-path="url(#clip201)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 4813.25 227)">PyTorch</text></g><g clip-path="url(#clip202)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3238.32 316)">Latency Benchmark with varying number of CPU cores and different OpenMP backend</text></g><g clip-path="url(#clip203)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3452.96 406)">(some data might be missing indicating benchmark crashed during runs)</text></g><g clip-path="url(#clip204)"><rect x="4433" y="3197" width="22" height="23" fill="url(#fill205)"/></g><g 
clip-path="url(#clip206)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4464.87 3220)">iomp</text></g><g clip-path="url(#clip207)"><rect x="4580" y="3197" width="22" height="23" fill="url(#fill208)"/></g><g clip-path="url(#clip209)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4611.98 3220)">gnu</text></g><rect x="1856.5" y="122.5" width="5379" height="3153" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g></svg> | 0 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/graph_mode_tensorflow_baseline.svg | [SVG markup condensed; recoverable chart content from this excerpt: TensorFlow graph-mode baseline latency chart with five bar series across five category groups. Value labels per group, listed series 1 through 5: group 1: 114, 36, 23, 16, 18; group 2: 124, 59, 32, 22, 28; group 3: 351, 139, 86, 50, 79; group 4: 1286, 313, 164, 84, 117; group 5: 1852, 429, 181, 96, 129. Y-axis ticks: 0, 200, 400, 600, 800, 1000, 1200. The legend and axis labels fall outside this excerpt; the remainder of the original markup continues below]
font-weight="400" font-size="41" transform="matrix(1 0 0 1 1385.07 1249)">1400</text></g><g clip-path="url(#clip86)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1385.07 1001)">1600</text></g><g clip-path="url(#clip87)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1385.07 752)">1800</text></g><g clip-path="url(#clip88)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1385.07 503)">2000</text></g><g clip-path="url(#clip89)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2028.83 3046)">20</text></g><g clip-path="url(#clip90)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3114.41 3046)">32</text></g><g clip-path="url(#clip91)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4189.54 3046)">128</text></g><g clip-path="url(#clip92)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5275.12 3046)">384</text></g><g clip-path="url(#clip93)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6360.71 3046)">512</text></g><g clip-path="url(#clip94)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(6.12323e-17 -1 1 6.12323e-17 1359.97 1844)">Latency (ms)</text></g><g clip-path="url(#clip95)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 4080 3110)">Sequence Length</text></g><g clip-path="url(#clip96)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3341.63 235)">Intel Ice lake Xeon 8380 </text></g><g clip-path="url(#clip97)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 4076.98 235)">-</text></g><g clip-path="url(#clip98)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 4116 235)">TensorFlow (Graph Mode)</text></g><g clip-path="url(#clip99)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 2850.91 324)">Latency Benchmark with varying number of CPU cores involved in the computations</text></g><g clip-path="url(#clip100)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3032 414)">(some data might be missing indicating benchmark crashed during runs)</text></g><g clip-path="url(#clip101)"><rect x="3916" y="3206" width="23" height="22" fill="url(#fill102)"/></g><g clip-path="url(#clip103)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3948.37 3229)">1</text></g><g clip-path="url(#clip104)"><rect x="3995" y="3206" width="23" height="22" 
fill="url(#fill105)"/></g><g clip-path="url(#clip106)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4027.71 3229)">4</text></g><g clip-path="url(#clip107)"><rect x="4075" y="3206" width="21.9998" height="22" fill="url(#fill108)"/></g><g clip-path="url(#clip109)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4107.04 3229)">16</text></g><g clip-path="url(#clip110)"><rect x="4175" y="3206" width="23" height="22" fill="url(#fill111)"/></g><g clip-path="url(#clip112)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4207.28 3229)">40</text></g><g clip-path="url(#clip113)"><rect x="4275" y="3206" width="23" height="22" fill="url(#fill114)"/></g><g clip-path="url(#clip115)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4307.52 3229)">80</text></g><rect x="1264.5" y="131.5" width="5721" height="3152" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g></svg> | 1 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/eager_mode_baseline.svg | [SVG bar chart, 5881x3485 — eager mode baseline figure. This span contains only the <defs> block (clip paths and bar-fill gradients for four series colors: blue, orange, gray, yellow); no titles, axis names, or data labels are recoverable here. Markup elided.]
x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip338"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip339"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip340"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip341"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><linearGradient x1="3418.5" y1="3473" x2="3418.5" y2="3496" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill342"><stop offset="0" stop-color="#6083CB"/><stop offset="0.5" stop-color="#3E70CA"/><stop offset="1" stop-color="#2E61BA"/></linearGradient><clipPath id="clip343"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip344"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><linearGradient x1="3777.5" y1="3473" x2="3777.5" y2="3496" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill345"><stop offset="0" stop-color="#F18C55"/><stop offset="0.5" stop-color="#F67B28"/><stop offset="1" stop-color="#E56B17"/></linearGradient><clipPath id="clip346"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip347"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><linearGradient x1="3997" y1="3473" x2="3997" y2="3496" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill348"><stop offset="0" stop-color="#AFAFAF"/><stop offset="0.5" stop-color="#A5A5A5"/><stop offset="1" stop-color="#929292"/></linearGradient><clipPath id="clip349"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><clipPath id="clip350"><rect x="1075" y="68" width="5879" height="3482"/></clipPath><linearGradient x1="4268.5" y1="3473" x2="4268.5" y2="3496" gradientUnits="userSpaceOnUse" spreadMethod="reflect" id="fill351"><stop offset="0" stop-color="#FFC746"/><stop offset="0.5" stop-color="#FFC600"/><stop offset="1" stop-color="#E5B600"/></linearGradient><clipPath id="clip352"><rect x="1075" y="68" width="5879" height="3482"/></clipPath></defs><g clip-path="url(#clip0)" transform="translate(-1075 -68)"><rect x="1076" y="69.0001" width="5878" height="3482" fill="#FFFFFF"/><g clip-path="url(#clip1)"><path d="M1298.5 2653.59 6903.5 2653.59M1298.5 2186.57 6903.5 2186.57M1298.5 1719.56 6903.5 1719.56M1298.5 1251.54 6903.5 1251.54M1298.5 784.526 6903.5 784.526M1298.5 317.5 6903.5 317.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip2)"><rect x="1312" y="2935" width="27.9999" height="186" fill="url(#fill3)"/></g><g clip-path="url(#clip4)"><rect x="1472" y="2908" width="28.0002" height="213" fill="url(#fill5)"/></g><g clip-path="url(#clip6)"><rect x="1633" y="2696" width="27.9999" height="425" fill="url(#fill7)"/></g><g clip-path="url(#clip8)"><rect x="1793" y="1861" width="27.9999" height="1260" fill="url(#fill9)"/></g><g clip-path="url(#clip10)"><rect x="1953" y="1299" width="28.0002" height="1822" fill="url(#fill11)"/></g><g clip-path="url(#clip12)"><rect x="2113" y="2995" width="27.9998" height="126" fill="url(#fill13)"/></g><g clip-path="url(#clip14)"><rect x="2273" y="2980" width="28.0002" height="141" fill="url(#fill15)"/></g><g clip-path="url(#clip16)"><rect x="2433" y="2866" width="27.9998" height="255" fill="url(#fill17)"/></g><g clip-path="url(#clip18)"><rect x="2594" y="2452" width="28.0002" height="669" fill="url(#fill19)"/></g><g clip-path="url(#clip20)"><rect x="2754" y="2194" width="27.9998" height="927" fill="url(#fill21)"/></g><g 
clip-path="url(#clip22)"><rect x="2914" y="3025" width="28.0002" height="96" fill="url(#fill23)"/></g><g clip-path="url(#clip24)"><rect x="3074" y="3012" width="27.9998" height="109" fill="url(#fill25)"/></g><g clip-path="url(#clip26)"><rect x="3234" y="2940" width="27.9998" height="181" fill="url(#fill27)"/></g><g clip-path="url(#clip28)"><rect x="3394" y="2760" width="28.0002" height="361" fill="url(#fill29)"/></g><g clip-path="url(#clip30)"><rect x="3554" y="2641" width="27.9998" height="480" fill="url(#fill31)"/></g><g clip-path="url(#clip32)"><rect x="3715" y="3039" width="28.0002" height="81.9998" fill="url(#fill33)"/></g><g clip-path="url(#clip34)"><rect x="3875" y="3032" width="27.9998" height="89" fill="url(#fill35)"/></g><g clip-path="url(#clip36)"><rect x="4035" y="2985" width="28.0002" height="136" fill="url(#fill37)"/></g><g clip-path="url(#clip38)"><rect x="4195" y="2888" width="28" height="233" fill="url(#fill39)"/></g><g clip-path="url(#clip40)"><rect x="4355" y="2843" width="28" height="278" fill="url(#fill41)"/></g><g clip-path="url(#clip42)"><rect x="4515" y="3045" width="28" height="76" fill="url(#fill43)"/></g><g clip-path="url(#clip44)"><rect x="4676" y="3039" width="28" height="81.9998" fill="url(#fill45)"/></g><g clip-path="url(#clip46)"><rect x="4836" y="2996" width="28" height="125" fill="url(#fill47)"/></g><g clip-path="url(#clip48)"><rect x="4996" y="2929" width="28" height="192" fill="url(#fill49)"/></g><g clip-path="url(#clip50)"><rect x="5156" y="2897" width="28" height="224" fill="url(#fill51)"/></g><g clip-path="url(#clip52)"><rect x="5316" y="3050" width="28" height="70.9998" fill="url(#fill53)"/></g><g clip-path="url(#clip54)"><rect x="5476" y="3043" width="28" height="78" fill="url(#fill55)"/></g><g clip-path="url(#clip56)"><rect x="5636" y="3015" width="28" height="106" fill="url(#fill57)"/></g><g clip-path="url(#clip58)"><rect x="5797" y="2982" width="28" height="139" fill="url(#fill59)"/></g><g clip-path="url(#clip60)"><rect x="5957" y="2968" width="28" height="153" fill="url(#fill61)"/></g><g clip-path="url(#clip62)"><rect x="6117" y="3047" width="28" height="74" fill="url(#fill63)"/></g><g clip-path="url(#clip64)"><rect x="6277" y="3039" width="28" height="81.9998" fill="url(#fill65)"/></g><g clip-path="url(#clip66)"><rect x="6437" y="3009" width="28" height="112" fill="url(#fill67)"/></g><g clip-path="url(#clip68)"><rect x="6597" y="2984" width="28" height="137" fill="url(#fill69)"/></g><g clip-path="url(#clip70)"><rect x="6757" y="2968" width="28" height="153" fill="url(#fill71)"/></g><g clip-path="url(#clip72)"><rect x="1347" y="2993" width="27.9999" height="128" fill="url(#fill73)"/></g><g clip-path="url(#clip74)"><rect x="1507" y="2981" width="27.9999" height="140" fill="url(#fill75)"/></g><g clip-path="url(#clip76)"><rect x="1667" y="2844" width="28.0002" height="277" fill="url(#fill77)"/></g><g clip-path="url(#clip78)"><rect x="1827" y="2181" width="27.9999" height="940" fill="url(#fill79)"/></g><g clip-path="url(#clip80)"><rect x="1988" y="1605" width="28.0002" height="1516" fill="url(#fill81)"/></g><g clip-path="url(#clip82)"><rect x="2148" y="3062" width="27.9998" height="58.9998" fill="url(#fill83)"/></g><g clip-path="url(#clip84)"><rect x="2308" y="3056" width="28.0002" height="65" fill="url(#fill85)"/></g><g clip-path="url(#clip86)"><rect x="2468" y="2982" width="27.9998" height="139" fill="url(#fill87)"/></g><g clip-path="url(#clip88)"><rect x="2628" y="2610" width="28.0002" height="511" fill="url(#fill89)"/></g><g 
clip-path="url(#clip90)"><rect x="2788" y="2395" width="27.9998" height="726" fill="url(#fill91)"/></g><g clip-path="url(#clip92)"><rect x="2949" y="3088" width="27.9998" height="33" fill="url(#fill93)"/></g><g clip-path="url(#clip94)"><rect x="3109" y="3085" width="28.0002" height="35.9998" fill="url(#fill95)"/></g><g clip-path="url(#clip96)"><rect x="3269" y="3049" width="27.9998" height="71.9998" fill="url(#fill97)"/></g><g clip-path="url(#clip98)"><rect x="3429" y="2857" width="28.0002" height="264" fill="url(#fill99)"/></g><g clip-path="url(#clip100)"><rect x="3589" y="2758" width="27.9998" height="363" fill="url(#fill101)"/></g><g clip-path="url(#clip102)"><rect x="3749" y="3103" width="28.0002" height="17.9998" fill="url(#fill103)"/></g><g clip-path="url(#clip104)"><rect x="3909" y="3101" width="27.9998" height="20" fill="url(#fill105)"/></g><g clip-path="url(#clip106)"><rect x="4070" y="3082" width="28" height="38.9998" fill="url(#fill107)"/></g><g clip-path="url(#clip108)"><rect x="4230" y="2983" width="28" height="138" fill="url(#fill109)"/></g><g clip-path="url(#clip110)"><rect x="4390" y="2933" width="28" height="188" fill="url(#fill111)"/></g><g clip-path="url(#clip112)"><rect x="4550" y="3105" width="28" height="15.9998" fill="url(#fill113)"/></g><g clip-path="url(#clip114)"><rect x="4710" y="3103" width="28" height="17.9998" fill="url(#fill115)"/></g><g clip-path="url(#clip116)"><rect x="4870" y="3090" width="28" height="31" fill="url(#fill117)"/></g><g clip-path="url(#clip118)"><rect x="5031" y="3033" width="28" height="88" fill="url(#fill119)"/></g><g clip-path="url(#clip120)"><rect x="5191" y="3003" width="28" height="118" fill="url(#fill121)"/></g><g clip-path="url(#clip122)"><rect x="5351" y="3111" width="28" height="10" fill="url(#fill123)"/></g><g clip-path="url(#clip124)"><rect x="5511" y="3110" width="28" height="11" fill="url(#fill125)"/></g><g clip-path="url(#clip126)"><rect x="5671" y="3104" width="28" height="16.9998" fill="url(#fill127)"/></g><g clip-path="url(#clip128)"><rect x="5831" y="3069" width="28" height="52" fill="url(#fill129)"/></g><g clip-path="url(#clip130)"><rect x="5991" y="3044" width="28" height="77" fill="url(#fill131)"/></g><g clip-path="url(#clip132)"><rect x="6152" y="3108" width="28" height="13" fill="url(#fill133)"/></g><g clip-path="url(#clip134)"><rect x="6312" y="3106" width="28" height="14.9998" fill="url(#fill135)"/></g><g clip-path="url(#clip136)"><rect x="6472" y="3097" width="28" height="24" fill="url(#fill137)"/></g><g clip-path="url(#clip138)"><rect x="6632" y="3074" width="28" height="46.9998" fill="url(#fill139)"/></g><g clip-path="url(#clip140)"><rect x="6792" y="3050" width="28" height="70.9998" fill="url(#fill141)"/></g><g clip-path="url(#clip142)"><rect x="1382" y="2936" width="28.0002" height="185" fill="url(#fill143)"/></g><g clip-path="url(#clip144)"><rect x="1542" y="2908" width="27.9999" height="213" fill="url(#fill145)"/></g><g clip-path="url(#clip146)"><rect x="1702" y="2639" width="28.0002" height="482" fill="url(#fill147)"/></g><g clip-path="url(#clip148)"><rect x="1862" y="1645" width="27.9999" height="1476" fill="url(#fill149)"/></g><g clip-path="url(#clip150)"><rect x="2022" y="867" width="28.0001" height="2254" fill="url(#fill151)"/></g><g clip-path="url(#clip152)"><rect x="2182" y="2997" width="27.9998" height="124" fill="url(#fill153)"/></g><g clip-path="url(#clip154)"><rect x="2343" y="2912" width="27.9998" height="209" fill="url(#fill155)"/></g><g clip-path="url(#clip156)"><rect x="2503" y="2756" 
width="28.0002" height="365" fill="url(#fill157)"/></g><g clip-path="url(#clip158)"><rect x="2663" y="2327" width="27.9998" height="794" fill="url(#fill159)"/></g><g clip-path="url(#clip160)"><rect x="2823" y="1967" width="28.0002" height="1154" fill="url(#fill161)"/></g><g clip-path="url(#clip162)"><rect x="2983" y="3021" width="27.9998" height="100" fill="url(#fill163)"/></g><g clip-path="url(#clip164)"><rect x="3143" y="2983" width="28.0002" height="138" fill="url(#fill165)"/></g><g clip-path="url(#clip166)"><rect x="3304" y="2887" width="27.9998" height="234" fill="url(#fill167)"/></g><g clip-path="url(#clip168)"><rect x="3464" y="2682" width="28.0002" height="439" fill="url(#fill169)"/></g><g clip-path="url(#clip170)"><rect x="3624" y="2517" width="27.9998" height="604" fill="url(#fill171)"/></g><g clip-path="url(#clip172)"><rect x="3784" y="3038" width="27.9998" height="82.9998" fill="url(#fill173)"/></g><g clip-path="url(#clip174)"><rect x="3944" y="3017" width="28.0002" height="104" fill="url(#fill175)"/></g><g clip-path="url(#clip176)"><rect x="4104" y="2952" width="28" height="169" fill="url(#fill177)"/></g><g clip-path="url(#clip178)"><rect x="4264" y="2851" width="28" height="270" fill="url(#fill179)"/></g><g clip-path="url(#clip180)"><rect x="4425" y="2793" width="28" height="328" fill="url(#fill181)"/></g><g clip-path="url(#clip182)"><rect x="4585" y="3040" width="28" height="80.9998" fill="url(#fill183)"/></g><g clip-path="url(#clip184)"><rect x="4745" y="3023" width="28" height="98" fill="url(#fill185)"/></g><g clip-path="url(#clip186)"><rect x="4905" y="2965" width="28" height="156" fill="url(#fill187)"/></g><g clip-path="url(#clip188)"><rect x="5065" y="2882" width="28" height="239" fill="url(#fill189)"/></g><g clip-path="url(#clip190)"><rect x="5225" y="2854" width="28" height="267" fill="url(#fill191)"/></g><g clip-path="url(#clip192)"><rect x="5386" y="3049" width="28" height="71.9998" fill="url(#fill193)"/></g><g clip-path="url(#clip194)"><rect x="5546" y="3035" width="28" height="86" fill="url(#fill195)"/></g><g clip-path="url(#clip196)"><rect x="5706" y="3004" width="28" height="117" fill="url(#fill197)"/></g><g clip-path="url(#clip198)"><rect x="5866" y="2969" width="28" height="152" fill="url(#fill199)"/></g><g clip-path="url(#clip200)"><rect x="6026" y="2949" width="28" height="172" fill="url(#fill201)"/></g><g clip-path="url(#clip202)"><rect x="6186" y="3047" width="28" height="74" fill="url(#fill203)"/></g><g clip-path="url(#clip204)"><rect x="6346" y="3024" width="28" height="97" fill="url(#fill205)"/></g><g clip-path="url(#clip206)"><rect x="6507" y="2963" width="28" height="158" fill="url(#fill207)"/></g><g clip-path="url(#clip208)"><rect x="6667" y="2922" width="28" height="199" fill="url(#fill209)"/></g><g clip-path="url(#clip210)"><rect x="6827" y="2910" width="28" height="211" fill="url(#fill211)"/></g><g clip-path="url(#clip212)"><rect x="1416" y="2930" width="28.0002" height="191" fill="url(#fill213)"/></g><g clip-path="url(#clip214)"><rect x="1577" y="2904" width="27.9999" height="217" fill="url(#fill215)"/></g><g clip-path="url(#clip216)"><rect x="1737" y="2689" width="28.0002" height="432" fill="url(#fill217)"/></g><g clip-path="url(#clip218)"><rect x="1897" y="1833" width="27.9999" height="1288" fill="url(#fill219)"/></g><g clip-path="url(#clip220)"><rect x="2057" y="1269" width="27.9998" height="1852" fill="url(#fill221)"/></g><g clip-path="url(#clip222)"><rect x="2217" y="2980" width="28.0002" height="141" fill="url(#fill223)"/></g><g 
clip-path="url(#clip224)"><rect x="2377" y="2959" width="27.9998" height="162" fill="url(#fill225)"/></g><g clip-path="url(#clip226)"><rect x="2538" y="2832" width="28.0002" height="289" fill="url(#fill227)"/></g><g clip-path="url(#clip228)"><rect x="2698" y="2445" width="27.9998" height="676" fill="url(#fill229)"/></g><g clip-path="url(#clip230)"><rect x="2858" y="2148" width="28.0002" height="973" fill="url(#fill231)"/></g><g clip-path="url(#clip232)"><rect x="3018" y="3015" width="27.9998" height="106" fill="url(#fill233)"/></g><g clip-path="url(#clip234)"><rect x="3178" y="3001" width="28.0002" height="120" fill="url(#fill235)"/></g><g clip-path="url(#clip236)"><rect x="3338" y="2916" width="27.9998" height="205" fill="url(#fill237)"/></g><g clip-path="url(#clip238)"><rect x="3498" y="2734" width="27.9998" height="387" fill="url(#fill239)"/></g><g clip-path="url(#clip240)"><rect x="3659" y="2603" width="28.0002" height="518" fill="url(#fill241)"/></g><g clip-path="url(#clip242)"><rect x="3819" y="3035" width="27.9998" height="86" fill="url(#fill243)"/></g><g clip-path="url(#clip244)"><rect x="3979" y="3025" width="28.0002" height="96" fill="url(#fill245)"/></g><g clip-path="url(#clip246)"><rect x="4139" y="2960" width="28" height="161" fill="url(#fill247)"/></g><g clip-path="url(#clip248)"><rect x="4299" y="2860" width="28" height="261" fill="url(#fill249)"/></g><g clip-path="url(#clip250)"><rect x="4459" y="2811" width="28" height="310" fill="url(#fill251)"/></g><g clip-path="url(#clip252)"><rect x="4620" y="3041" width="28" height="79.9998" fill="url(#fill253)"/></g><g clip-path="url(#clip254)"><rect x="4780" y="3034" width="28" height="87" fill="url(#fill255)"/></g><g clip-path="url(#clip256)"><rect x="4940" y="2986" width="28" height="135" fill="url(#fill257)"/></g><g clip-path="url(#clip258)"><rect x="5100" y="2908" width="28" height="213" fill="url(#fill259)"/></g><g clip-path="url(#clip260)"><rect x="5260" y="2871" width="28" height="250" fill="url(#fill261)"/></g><g clip-path="url(#clip262)"><rect x="5420" y="3047" width="28" height="74" fill="url(#fill263)"/></g><g clip-path="url(#clip264)"><rect x="5580" y="3043" width="28" height="78" fill="url(#fill265)"/></g><g clip-path="url(#clip266)"><rect x="5741" y="3016" width="28" height="105" fill="url(#fill267)"/></g><g clip-path="url(#clip268)"><rect x="5901" y="2986" width="28" height="135" fill="url(#fill269)"/></g><g clip-path="url(#clip270)"><rect x="6061" y="2966" width="28" height="155" fill="url(#fill271)"/></g><g clip-path="url(#clip272)"><rect x="6221" y="3041" width="28" height="79.9998" fill="url(#fill273)"/></g><g clip-path="url(#clip274)"><rect x="6381" y="3034" width="28" height="87" fill="url(#fill275)"/></g><g clip-path="url(#clip276)"><rect x="6541" y="2993" width="28" height="128" fill="url(#fill277)"/></g><g clip-path="url(#clip278)"><rect x="6701" y="2950" width="28" height="171" fill="url(#fill279)"/></g><g clip-path="url(#clip280)"><rect x="6862" y="2939" width="28" height="182" fill="url(#fill281)"/></g><g clip-path="url(#clip282)"><path d="M1298.5 3121.5 6903.5 3121.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none" fill-rule="evenodd"/></g><g clip-path="url(#clip283)"><path d="M1298.5 3121.5 1298.5 3201.5M2098.57 3121.5 2098.57 3201.5M2899.6 3121.5 2899.6 3201.5M3700.62 3121.5 3700.62 3201.5M4501.65 3121.5 4501.65 3201.5M5301.67 3121.5 5301.67 3201.5M6102.7 3121.5 6102.7 3201.5M6903.5 3121.5 6903.5 3201.5" stroke="#E0E5EB" stroke-width="3.4375" 
stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip284)"><path d="M1298.5 3201.5 1298.5 3281.5M2098.57 3201.5 2098.57 3281.5M2899.6 3201.5 2899.6 3281.5M3700.62 3201.5 3700.62 3281.5M4501.65 3201.5 4501.65 3281.5M5301.67 3201.5 5301.67 3281.5M6102.7 3201.5 6102.7 3281.5M6903.5 3201.5 6903.5 3281.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip285)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1239.13 3133)">0</text></g><g clip-path="url(#clip286)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1218.23 2665)">50</text></g><g clip-path="url(#clip287)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 2198)">100</text></g><g clip-path="url(#clip288)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 1731)">150</text></g><g clip-path="url(#clip289)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 1263)">200</text></g><g clip-path="url(#clip290)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 796)">250</text></g><g clip-path="url(#clip291)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 329)">300</text></g><g clip-path="url(#clip292)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1357.47 3186)">20</text></g><g clip-path="url(#clip293)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1517.62 3186)">32</text></g><g clip-path="url(#clip294)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1667.33 3186)">128</text></g><g clip-path="url(#clip295)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1827.48 3186)">384</text></g><g clip-path="url(#clip296)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1987.63 3186)">512</text></g><g clip-path="url(#clip297)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2158.23 3186)">20</text></g><g clip-path="url(#clip298)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2318.39 3186)">32</text></g><g clip-path="url(#clip299)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2468.09 3186)">128</text></g><g clip-path="url(#clip300)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2628.24 3186)">384</text></g><g clip-path="url(#clip301)"><text 
fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2788.4 3186)">512</text></g><g clip-path="url(#clip302)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2959 3186)">20</text></g><g clip-path="url(#clip303)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3119.15 3186)">32</text></g><g clip-path="url(#clip304)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3268.86 3186)">128</text></g><g clip-path="url(#clip305)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3429.01 3186)">384</text></g><g clip-path="url(#clip306)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3589.16 3186)">512</text></g><g clip-path="url(#clip307)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3759.76 3186)">20</text></g><g clip-path="url(#clip308)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3919.92 3186)">32</text></g><g clip-path="url(#clip309)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4069.62 3186)">128</text></g><g clip-path="url(#clip310)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4229.77 3186)">384</text></g><g clip-path="url(#clip311)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4389.93 3186)">512</text></g><g clip-path="url(#clip312)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4560.53 3186)">20</text></g><g clip-path="url(#clip313)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4720.68 3186)">32</text></g><g clip-path="url(#clip314)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4870.38 3186)">128</text></g><g clip-path="url(#clip315)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5030.54 3186)">384</text></g><g clip-path="url(#clip316)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5190.69 3186)">512</text></g><g clip-path="url(#clip317)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5361.29 3186)">20</text></g><g clip-path="url(#clip318)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5521.45 3186)">32</text></g><g clip-path="url(#clip319)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" 
font-size="41" transform="matrix(1 0 0 1 5671.15 3186)">128</text></g><g clip-path="url(#clip320)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5831.3 3186)">384</text></g><g clip-path="url(#clip321)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5991.45 3186)">512</text></g><g clip-path="url(#clip322)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6162.06 3186)">20</text></g><g clip-path="url(#clip323)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6322.21 3186)">32</text></g><g clip-path="url(#clip324)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6471.91 3186)">128</text></g><g clip-path="url(#clip325)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6632.07 3186)">384</text></g><g clip-path="url(#clip326)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6792.22 3186)">512</text></g><g clip-path="url(#clip327)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1688.23 3266)">1</text></g><g clip-path="url(#clip328)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2488.99 3266)">2</text></g><g clip-path="url(#clip329)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3289.76 3266)">4</text></g><g clip-path="url(#clip330)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4090.52 3266)">8</text></g><g clip-path="url(#clip331)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4880.83 3266)">16</text></g><g clip-path="url(#clip332)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5681.6 3266)">40</text></g><g clip-path="url(#clip333)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6482.36 3266)">80</text></g><g clip-path="url(#clip334)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(6.12323e-17 -1 1 6.12323e-17 1172.23 1828)">Latency (ms)</text></g><g clip-path="url(#clip335)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 3955.41 3327)">Sequence Length</text></g><g clip-path="url(#clip336)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 3827.88 3378)">Number of computational cores</text></g><g clip-path="url(#clip337)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" 
transform="matrix(1 0 0 1 2711.67 173)">Intel Ice lake Xeon 8380 </text></g><g clip-path="url(#clip338)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3447.02 173)">-</text></g><g clip-path="url(#clip339)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3486.05 173)">Eager Mode Frameworks Latency Benchmark (batch size = 1)</text></g><g clip-path="url(#clip340)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-style="italic" font-weight="700" font-size="55" transform="matrix(1 0 0 1 3192.44 245)">(some data might be missing indicating benchmark crashed during runs)</text></g><g clip-path="url(#clip341)"><rect x="3407" y="3473" width="23" height="23" fill="url(#fill342)"/></g><g clip-path="url(#clip343)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3439.74 3496)">intel-tensorflow</text></g><g clip-path="url(#clip344)"><rect x="3766" y="3473" width="23" height="23" fill="url(#fill345)"/></g><g clip-path="url(#clip346)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3798.68 3496)">pytorch</text></g><g clip-path="url(#clip347)"><rect x="3986" y="3473" width="22" height="23" fill="url(#fill348)"/></g><g clip-path="url(#clip349)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4017.88 3496)">tensorflow</text></g><g clip-path="url(#clip350)"><rect x="4257" y="3473" width="23" height="23" fill="url(#fill351)"/></g><g clip-path="url(#clip352)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4289.23 3496)">tensorflow + onednn</text></g><rect x="1076.5" y="69.4999" width="5878" height="3482" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g></svg> | 2 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/graph_mode_tensorflow_onednn_baseline.svg | [SVG figure: "Intel Ice lake Xeon 8380 - TensorFlow (Graph Mode) + oneDNN enabled (>= 2.5.0)", subtitle "Latency Benchmark with varying number of CPU cores involved in the computations", note "(some data might be missing indicating benchmark crashed during runs)". Bar chart; y-axis: Latency (ms), 0-1800; x-axis: Sequence Length (20, 32, 128, 384, 512); five series with data labels (ms): 112/122/271/997/1539, 36/41/89/257/363, 24/28/66/119/148, 19/22/39/69/79, 25/24/52/85/93.]
font-size="41" transform="matrix(1 0 0 1 4188.48 3229)">4</text></g><g clip-path="url(#clip106)"><rect x="4236" y="3206" width="22" height="22" fill="url(#fill107)"/></g><g clip-path="url(#clip108)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4267.81 3229)">16</text></g><g clip-path="url(#clip109)"><rect x="4336" y="3206" width="22" height="22" fill="url(#fill110)"/></g><g clip-path="url(#clip111)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4368.05 3229)">40</text></g><g clip-path="url(#clip112)"><rect x="4436" y="3206" width="23" height="22" fill="url(#fill113)"/></g><g clip-path="url(#clip114)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4468.29 3229)">80</text></g><rect x="1437.5" y="131.5" width="5696" height="3152" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g></svg> | 3 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/graph_mode_baseline.svg | [SVG bar chart (graph_mode_baseline.svg): gradient/clip-path definitions and grouped bar rects for the graph-mode latency baseline; markup omitted]
clip-path="url(#clip22)"><rect x="2914" y="3025" width="28.0002" height="96" fill="url(#fill23)"/></g><g clip-path="url(#clip24)"><rect x="3074" y="3012" width="27.9998" height="109" fill="url(#fill25)"/></g><g clip-path="url(#clip26)"><rect x="3234" y="2940" width="27.9998" height="181" fill="url(#fill27)"/></g><g clip-path="url(#clip28)"><rect x="3394" y="2760" width="28.0002" height="361" fill="url(#fill29)"/></g><g clip-path="url(#clip30)"><rect x="3554" y="2641" width="27.9998" height="480" fill="url(#fill31)"/></g><g clip-path="url(#clip32)"><rect x="3715" y="3039" width="28.0002" height="81.9998" fill="url(#fill33)"/></g><g clip-path="url(#clip34)"><rect x="3875" y="3032" width="27.9998" height="89" fill="url(#fill35)"/></g><g clip-path="url(#clip36)"><rect x="4035" y="2985" width="28.0002" height="136" fill="url(#fill37)"/></g><g clip-path="url(#clip38)"><rect x="4195" y="2888" width="28" height="233" fill="url(#fill39)"/></g><g clip-path="url(#clip40)"><rect x="4355" y="2843" width="28" height="278" fill="url(#fill41)"/></g><g clip-path="url(#clip42)"><rect x="4515" y="3045" width="28" height="76" fill="url(#fill43)"/></g><g clip-path="url(#clip44)"><rect x="4676" y="3039" width="28" height="81.9998" fill="url(#fill45)"/></g><g clip-path="url(#clip46)"><rect x="4836" y="2996" width="28" height="125" fill="url(#fill47)"/></g><g clip-path="url(#clip48)"><rect x="4996" y="2929" width="28" height="192" fill="url(#fill49)"/></g><g clip-path="url(#clip50)"><rect x="5156" y="2897" width="28" height="224" fill="url(#fill51)"/></g><g clip-path="url(#clip52)"><rect x="5316" y="3050" width="28" height="70.9998" fill="url(#fill53)"/></g><g clip-path="url(#clip54)"><rect x="5476" y="3043" width="28" height="78" fill="url(#fill55)"/></g><g clip-path="url(#clip56)"><rect x="5636" y="3015" width="28" height="106" fill="url(#fill57)"/></g><g clip-path="url(#clip58)"><rect x="5797" y="2982" width="28" height="139" fill="url(#fill59)"/></g><g clip-path="url(#clip60)"><rect x="5957" y="2968" width="28" height="153" fill="url(#fill61)"/></g><g clip-path="url(#clip62)"><rect x="6117" y="3047" width="28" height="74" fill="url(#fill63)"/></g><g clip-path="url(#clip64)"><rect x="6277" y="3039" width="28" height="81.9998" fill="url(#fill65)"/></g><g clip-path="url(#clip66)"><rect x="6437" y="3009" width="28" height="112" fill="url(#fill67)"/></g><g clip-path="url(#clip68)"><rect x="6597" y="2984" width="28" height="137" fill="url(#fill69)"/></g><g clip-path="url(#clip70)"><rect x="6757" y="2968" width="28" height="153" fill="url(#fill71)"/></g><g clip-path="url(#clip72)"><rect x="1347" y="2993" width="27.9999" height="128" fill="url(#fill73)"/></g><g clip-path="url(#clip74)"><rect x="1507" y="2981" width="27.9999" height="140" fill="url(#fill75)"/></g><g clip-path="url(#clip76)"><rect x="1667" y="2844" width="28.0002" height="277" fill="url(#fill77)"/></g><g clip-path="url(#clip78)"><rect x="1827" y="2181" width="27.9999" height="940" fill="url(#fill79)"/></g><g clip-path="url(#clip80)"><rect x="1988" y="1605" width="28.0002" height="1516" fill="url(#fill81)"/></g><g clip-path="url(#clip82)"><rect x="2148" y="3062" width="27.9998" height="58.9998" fill="url(#fill83)"/></g><g clip-path="url(#clip84)"><rect x="2308" y="3056" width="28.0002" height="65" fill="url(#fill85)"/></g><g clip-path="url(#clip86)"><rect x="2468" y="2982" width="27.9998" height="139" fill="url(#fill87)"/></g><g clip-path="url(#clip88)"><rect x="2628" y="2610" width="28.0002" height="511" fill="url(#fill89)"/></g><g 
clip-path="url(#clip90)"><rect x="2788" y="2395" width="27.9998" height="726" fill="url(#fill91)"/></g><g clip-path="url(#clip92)"><rect x="2949" y="3088" width="27.9998" height="33" fill="url(#fill93)"/></g><g clip-path="url(#clip94)"><rect x="3109" y="3085" width="28.0002" height="35.9998" fill="url(#fill95)"/></g><g clip-path="url(#clip96)"><rect x="3269" y="3049" width="27.9998" height="71.9998" fill="url(#fill97)"/></g><g clip-path="url(#clip98)"><rect x="3429" y="2857" width="28.0002" height="264" fill="url(#fill99)"/></g><g clip-path="url(#clip100)"><rect x="3589" y="2758" width="27.9998" height="363" fill="url(#fill101)"/></g><g clip-path="url(#clip102)"><rect x="3749" y="3103" width="28.0002" height="17.9998" fill="url(#fill103)"/></g><g clip-path="url(#clip104)"><rect x="3909" y="3101" width="27.9998" height="20" fill="url(#fill105)"/></g><g clip-path="url(#clip106)"><rect x="4070" y="3082" width="28" height="38.9998" fill="url(#fill107)"/></g><g clip-path="url(#clip108)"><rect x="4230" y="2983" width="28" height="138" fill="url(#fill109)"/></g><g clip-path="url(#clip110)"><rect x="4390" y="2933" width="28" height="188" fill="url(#fill111)"/></g><g clip-path="url(#clip112)"><rect x="4550" y="3105" width="28" height="15.9998" fill="url(#fill113)"/></g><g clip-path="url(#clip114)"><rect x="4710" y="3103" width="28" height="17.9998" fill="url(#fill115)"/></g><g clip-path="url(#clip116)"><rect x="4870" y="3090" width="28" height="31" fill="url(#fill117)"/></g><g clip-path="url(#clip118)"><rect x="5031" y="3033" width="28" height="88" fill="url(#fill119)"/></g><g clip-path="url(#clip120)"><rect x="5191" y="3003" width="28" height="118" fill="url(#fill121)"/></g><g clip-path="url(#clip122)"><rect x="5351" y="3111" width="28" height="10" fill="url(#fill123)"/></g><g clip-path="url(#clip124)"><rect x="5511" y="3110" width="28" height="11" fill="url(#fill125)"/></g><g clip-path="url(#clip126)"><rect x="5671" y="3104" width="28" height="16.9998" fill="url(#fill127)"/></g><g clip-path="url(#clip128)"><rect x="5831" y="3069" width="28" height="52" fill="url(#fill129)"/></g><g clip-path="url(#clip130)"><rect x="5991" y="3044" width="28" height="77" fill="url(#fill131)"/></g><g clip-path="url(#clip132)"><rect x="6152" y="3108" width="28" height="13" fill="url(#fill133)"/></g><g clip-path="url(#clip134)"><rect x="6312" y="3106" width="28" height="14.9998" fill="url(#fill135)"/></g><g clip-path="url(#clip136)"><rect x="6472" y="3097" width="28" height="24" fill="url(#fill137)"/></g><g clip-path="url(#clip138)"><rect x="6632" y="3074" width="28" height="46.9998" fill="url(#fill139)"/></g><g clip-path="url(#clip140)"><rect x="6792" y="3050" width="28" height="70.9998" fill="url(#fill141)"/></g><g clip-path="url(#clip142)"><rect x="1382" y="2936" width="28.0002" height="185" fill="url(#fill143)"/></g><g clip-path="url(#clip144)"><rect x="1542" y="2908" width="27.9999" height="213" fill="url(#fill145)"/></g><g clip-path="url(#clip146)"><rect x="1702" y="2639" width="28.0002" height="482" fill="url(#fill147)"/></g><g clip-path="url(#clip148)"><rect x="1862" y="1645" width="27.9999" height="1476" fill="url(#fill149)"/></g><g clip-path="url(#clip150)"><rect x="2022" y="867" width="28.0001" height="2254" fill="url(#fill151)"/></g><g clip-path="url(#clip152)"><rect x="2182" y="2997" width="27.9998" height="124" fill="url(#fill153)"/></g><g clip-path="url(#clip154)"><rect x="2343" y="2912" width="27.9998" height="209" fill="url(#fill155)"/></g><g clip-path="url(#clip156)"><rect x="2503" y="2756" 
width="28.0002" height="365" fill="url(#fill157)"/></g><g clip-path="url(#clip158)"><rect x="2663" y="2327" width="27.9998" height="794" fill="url(#fill159)"/></g><g clip-path="url(#clip160)"><rect x="2823" y="1967" width="28.0002" height="1154" fill="url(#fill161)"/></g><g clip-path="url(#clip162)"><rect x="2983" y="3021" width="27.9998" height="100" fill="url(#fill163)"/></g><g clip-path="url(#clip164)"><rect x="3143" y="2983" width="28.0002" height="138" fill="url(#fill165)"/></g><g clip-path="url(#clip166)"><rect x="3304" y="2887" width="27.9998" height="234" fill="url(#fill167)"/></g><g clip-path="url(#clip168)"><rect x="3464" y="2682" width="28.0002" height="439" fill="url(#fill169)"/></g><g clip-path="url(#clip170)"><rect x="3624" y="2517" width="27.9998" height="604" fill="url(#fill171)"/></g><g clip-path="url(#clip172)"><rect x="3784" y="3038" width="27.9998" height="82.9998" fill="url(#fill173)"/></g><g clip-path="url(#clip174)"><rect x="3944" y="3017" width="28.0002" height="104" fill="url(#fill175)"/></g><g clip-path="url(#clip176)"><rect x="4104" y="2952" width="28" height="169" fill="url(#fill177)"/></g><g clip-path="url(#clip178)"><rect x="4264" y="2851" width="28" height="270" fill="url(#fill179)"/></g><g clip-path="url(#clip180)"><rect x="4425" y="2793" width="28" height="328" fill="url(#fill181)"/></g><g clip-path="url(#clip182)"><rect x="4585" y="3040" width="28" height="80.9998" fill="url(#fill183)"/></g><g clip-path="url(#clip184)"><rect x="4745" y="3023" width="28" height="98" fill="url(#fill185)"/></g><g clip-path="url(#clip186)"><rect x="4905" y="2965" width="28" height="156" fill="url(#fill187)"/></g><g clip-path="url(#clip188)"><rect x="5065" y="2882" width="28" height="239" fill="url(#fill189)"/></g><g clip-path="url(#clip190)"><rect x="5225" y="2854" width="28" height="267" fill="url(#fill191)"/></g><g clip-path="url(#clip192)"><rect x="5386" y="3049" width="28" height="71.9998" fill="url(#fill193)"/></g><g clip-path="url(#clip194)"><rect x="5546" y="3035" width="28" height="86" fill="url(#fill195)"/></g><g clip-path="url(#clip196)"><rect x="5706" y="3004" width="28" height="117" fill="url(#fill197)"/></g><g clip-path="url(#clip198)"><rect x="5866" y="2969" width="28" height="152" fill="url(#fill199)"/></g><g clip-path="url(#clip200)"><rect x="6026" y="2949" width="28" height="172" fill="url(#fill201)"/></g><g clip-path="url(#clip202)"><rect x="6186" y="3047" width="28" height="74" fill="url(#fill203)"/></g><g clip-path="url(#clip204)"><rect x="6346" y="3024" width="28" height="97" fill="url(#fill205)"/></g><g clip-path="url(#clip206)"><rect x="6507" y="2963" width="28" height="158" fill="url(#fill207)"/></g><g clip-path="url(#clip208)"><rect x="6667" y="2922" width="28" height="199" fill="url(#fill209)"/></g><g clip-path="url(#clip210)"><rect x="6827" y="2910" width="28" height="211" fill="url(#fill211)"/></g><g clip-path="url(#clip212)"><rect x="1416" y="2930" width="28.0002" height="191" fill="url(#fill213)"/></g><g clip-path="url(#clip214)"><rect x="1577" y="2904" width="27.9999" height="217" fill="url(#fill215)"/></g><g clip-path="url(#clip216)"><rect x="1737" y="2689" width="28.0002" height="432" fill="url(#fill217)"/></g><g clip-path="url(#clip218)"><rect x="1897" y="1833" width="27.9999" height="1288" fill="url(#fill219)"/></g><g clip-path="url(#clip220)"><rect x="2057" y="1269" width="27.9998" height="1852" fill="url(#fill221)"/></g><g clip-path="url(#clip222)"><rect x="2217" y="2980" width="28.0002" height="141" fill="url(#fill223)"/></g><g 
clip-path="url(#clip224)"><rect x="2377" y="2959" width="27.9998" height="162" fill="url(#fill225)"/></g><g clip-path="url(#clip226)"><rect x="2538" y="2832" width="28.0002" height="289" fill="url(#fill227)"/></g><g clip-path="url(#clip228)"><rect x="2698" y="2445" width="27.9998" height="676" fill="url(#fill229)"/></g><g clip-path="url(#clip230)"><rect x="2858" y="2148" width="28.0002" height="973" fill="url(#fill231)"/></g><g clip-path="url(#clip232)"><rect x="3018" y="3015" width="27.9998" height="106" fill="url(#fill233)"/></g><g clip-path="url(#clip234)"><rect x="3178" y="3001" width="28.0002" height="120" fill="url(#fill235)"/></g><g clip-path="url(#clip236)"><rect x="3338" y="2916" width="27.9998" height="205" fill="url(#fill237)"/></g><g clip-path="url(#clip238)"><rect x="3498" y="2734" width="27.9998" height="387" fill="url(#fill239)"/></g><g clip-path="url(#clip240)"><rect x="3659" y="2603" width="28.0002" height="518" fill="url(#fill241)"/></g><g clip-path="url(#clip242)"><rect x="3819" y="3035" width="27.9998" height="86" fill="url(#fill243)"/></g><g clip-path="url(#clip244)"><rect x="3979" y="3025" width="28.0002" height="96" fill="url(#fill245)"/></g><g clip-path="url(#clip246)"><rect x="4139" y="2960" width="28" height="161" fill="url(#fill247)"/></g><g clip-path="url(#clip248)"><rect x="4299" y="2860" width="28" height="261" fill="url(#fill249)"/></g><g clip-path="url(#clip250)"><rect x="4459" y="2811" width="28" height="310" fill="url(#fill251)"/></g><g clip-path="url(#clip252)"><rect x="4620" y="3041" width="28" height="79.9998" fill="url(#fill253)"/></g><g clip-path="url(#clip254)"><rect x="4780" y="3034" width="28" height="87" fill="url(#fill255)"/></g><g clip-path="url(#clip256)"><rect x="4940" y="2986" width="28" height="135" fill="url(#fill257)"/></g><g clip-path="url(#clip258)"><rect x="5100" y="2908" width="28" height="213" fill="url(#fill259)"/></g><g clip-path="url(#clip260)"><rect x="5260" y="2871" width="28" height="250" fill="url(#fill261)"/></g><g clip-path="url(#clip262)"><rect x="5420" y="3047" width="28" height="74" fill="url(#fill263)"/></g><g clip-path="url(#clip264)"><rect x="5580" y="3043" width="28" height="78" fill="url(#fill265)"/></g><g clip-path="url(#clip266)"><rect x="5741" y="3016" width="28" height="105" fill="url(#fill267)"/></g><g clip-path="url(#clip268)"><rect x="5901" y="2986" width="28" height="135" fill="url(#fill269)"/></g><g clip-path="url(#clip270)"><rect x="6061" y="2966" width="28" height="155" fill="url(#fill271)"/></g><g clip-path="url(#clip272)"><rect x="6221" y="3041" width="28" height="79.9998" fill="url(#fill273)"/></g><g clip-path="url(#clip274)"><rect x="6381" y="3034" width="28" height="87" fill="url(#fill275)"/></g><g clip-path="url(#clip276)"><rect x="6541" y="2993" width="28" height="128" fill="url(#fill277)"/></g><g clip-path="url(#clip278)"><rect x="6701" y="2950" width="28" height="171" fill="url(#fill279)"/></g><g clip-path="url(#clip280)"><rect x="6862" y="2939" width="28" height="182" fill="url(#fill281)"/></g><g clip-path="url(#clip282)"><path d="M1298.5 3121.5 6903.5 3121.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none" fill-rule="evenodd"/></g><g clip-path="url(#clip283)"><path d="M1298.5 3121.5 1298.5 3201.5M2098.57 3121.5 2098.57 3201.5M2899.6 3121.5 2899.6 3201.5M3700.62 3121.5 3700.62 3201.5M4501.65 3121.5 4501.65 3201.5M5301.67 3121.5 5301.67 3201.5M6102.7 3121.5 6102.7 3201.5M6903.5 3121.5 6903.5 3201.5" stroke="#E0E5EB" stroke-width="3.4375" 
stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip284)"><path d="M1298.5 3201.5 1298.5 3281.5M2098.57 3201.5 2098.57 3281.5M2899.6 3201.5 2899.6 3281.5M3700.62 3201.5 3700.62 3281.5M4501.65 3201.5 4501.65 3281.5M5301.67 3201.5 5301.67 3281.5M6102.7 3201.5 6102.7 3281.5M6903.5 3201.5 6903.5 3281.5" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g><g clip-path="url(#clip285)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1239.13 3133)">0</text></g><g clip-path="url(#clip286)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1218.23 2665)">50</text></g><g clip-path="url(#clip287)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 2198)">100</text></g><g clip-path="url(#clip288)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 1731)">150</text></g><g clip-path="url(#clip289)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 1263)">200</text></g><g clip-path="url(#clip290)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 796)">250</text></g><g clip-path="url(#clip291)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1197.33 329)">300</text></g><g clip-path="url(#clip292)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1357.47 3186)">20</text></g><g clip-path="url(#clip293)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1517.62 3186)">32</text></g><g clip-path="url(#clip294)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1667.33 3186)">128</text></g><g clip-path="url(#clip295)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1827.48 3186)">384</text></g><g clip-path="url(#clip296)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1987.63 3186)">512</text></g><g clip-path="url(#clip297)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2158.23 3186)">20</text></g><g clip-path="url(#clip298)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2318.39 3186)">32</text></g><g clip-path="url(#clip299)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2468.09 3186)">128</text></g><g clip-path="url(#clip300)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2628.24 3186)">384</text></g><g clip-path="url(#clip301)"><text 
fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2788.4 3186)">512</text></g><g clip-path="url(#clip302)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2959 3186)">20</text></g><g clip-path="url(#clip303)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3119.15 3186)">32</text></g><g clip-path="url(#clip304)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3268.86 3186)">128</text></g><g clip-path="url(#clip305)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3429.01 3186)">384</text></g><g clip-path="url(#clip306)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3589.16 3186)">512</text></g><g clip-path="url(#clip307)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3759.76 3186)">20</text></g><g clip-path="url(#clip308)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3919.92 3186)">32</text></g><g clip-path="url(#clip309)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4069.62 3186)">128</text></g><g clip-path="url(#clip310)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4229.77 3186)">384</text></g><g clip-path="url(#clip311)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4389.93 3186)">512</text></g><g clip-path="url(#clip312)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4560.53 3186)">20</text></g><g clip-path="url(#clip313)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4720.68 3186)">32</text></g><g clip-path="url(#clip314)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4870.38 3186)">128</text></g><g clip-path="url(#clip315)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5030.54 3186)">384</text></g><g clip-path="url(#clip316)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5190.69 3186)">512</text></g><g clip-path="url(#clip317)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5361.29 3186)">20</text></g><g clip-path="url(#clip318)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5521.45 3186)">32</text></g><g clip-path="url(#clip319)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" 
font-size="41" transform="matrix(1 0 0 1 5671.15 3186)">128</text></g><g clip-path="url(#clip320)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5831.3 3186)">384</text></g><g clip-path="url(#clip321)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5991.45 3186)">512</text></g><g clip-path="url(#clip322)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6162.06 3186)">20</text></g><g clip-path="url(#clip323)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6322.21 3186)">32</text></g><g clip-path="url(#clip324)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6471.91 3186)">128</text></g><g clip-path="url(#clip325)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6632.07 3186)">384</text></g><g clip-path="url(#clip326)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6792.22 3186)">512</text></g><g clip-path="url(#clip327)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 1688.23 3266)">1</text></g><g clip-path="url(#clip328)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 2488.99 3266)">2</text></g><g clip-path="url(#clip329)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3289.76 3266)">4</text></g><g clip-path="url(#clip330)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4090.52 3266)">8</text></g><g clip-path="url(#clip331)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4880.83 3266)">16</text></g><g clip-path="url(#clip332)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 5681.6 3266)">40</text></g><g clip-path="url(#clip333)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 6482.36 3266)">80</text></g><g clip-path="url(#clip334)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(6.12323e-17 -1 1 6.12323e-17 1172.23 1828)">Latency (ms)</text></g><g clip-path="url(#clip335)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 3955.41 3327)">Sequence Length</text></g><g clip-path="url(#clip336)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="41" transform="matrix(1 0 0 1 3827.88 3378)">Number of computational cores</text></g><g clip-path="url(#clip337)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" 
transform="matrix(1 0 0 1 2702.62 173)">Intel Ice lake Xeon 8380 </text></g><g clip-path="url(#clip338)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3437.97 173)">-</text></g><g clip-path="url(#clip339)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="700" font-size="73" transform="matrix(1 0 0 1 3477 173)">Graph Mode Frameworks Latency Benchmark (batch size = 1)</text></g><g clip-path="url(#clip340)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-style="italic" font-weight="700" font-size="55" transform="matrix(1 0 0 1 3192.44 245)">(some data might be missing indicating benchmark crashed during runs)</text></g><g clip-path="url(#clip341)"><rect x="3407" y="3473" width="23" height="23" fill="url(#fill342)"/></g><g clip-path="url(#clip343)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3439.74 3496)">intel-tensorflow</text></g><g clip-path="url(#clip344)"><rect x="3766" y="3473" width="23" height="23" fill="url(#fill345)"/></g><g clip-path="url(#clip346)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 3798.68 3496)">pytorch</text></g><g clip-path="url(#clip347)"><rect x="3986" y="3473" width="22" height="23" fill="url(#fill348)"/></g><g clip-path="url(#clip349)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4017.88 3496)">tensorflow</text></g><g clip-path="url(#clip350)"><rect x="4257" y="3473" width="23" height="23" fill="url(#fill351)"/></g><g clip-path="url(#clip352)"><text fill="#44546A" font-family="Calibri,Calibri_MSFontService,sans-serif" font-weight="400" font-size="41" transform="matrix(1 0 0 1 4289.23 3496)">tensorflow + onednn</text></g><rect x="1076.5" y="69.4999" width="5878" height="3482" stroke="#E0E5EB" stroke-width="3.4375" stroke-linejoin="round" stroke-miterlimit="10" fill="none"/></g></svg> | 4 |
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/graph_mode_torchscript_baseline.svg | <svg width="5699" height="3155" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" overflow="hidden"><!-- Chart: "Intel Ice lake Xeon 8380 - TorchScript: Latency Benchmark with varying number of CPU cores involved in the computations"; subtitle: "(some data might be missing indicating benchmark crashed during runs)". Y-axis: Latency (ms), 0 to 1200. X-axis: Sequence Length (20, 32, 128, 384, 512). One series per CPU-core count, with bar labels in ms: 1 core: 132, 143, 281, 1031, missing at 512; 4 cores: 32, 35, 73, 236, 385; 16 cores: 14, 16, 29, 72, 138; 40 cores: 9, 9, 16, 42, 47; 80 cores: 11, 13, 23, 37, 76. --></svg> | 5
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/graph_mode_intel_tensorflow_baseline.svg | [SVG bar chart. Title: "Intel Ice lake Xeon 8380 - Intel TensorFlow"; subtitle: "Latency Benchmark with varying number of CPU cores involved in the computations (some data might be missing indicating benchmark crashed during runs)". Y axis: Latency (ms), 0-1800; X axis: Sequence Length (20, 32, 128, 384, 512); legend (CPU cores): 1, 4, 16, 40, 80. Bar values in ms per series — 1 core: 113, 124, 273, 1010, 1568; 4 cores: 32, 37, 103, 267, 356; 16 cores: 17, 23, 74, 122, 152; 40 cores: 12, 17, 60, 87, 90; 80 cores: 15, 20, 61, 83, 90.] | 6
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/eager_mode_tensorflow_baseline.svg | [SVG bar chart. Title: "Intel Ice lake Xeon 8380 - TensorFlow Eager"; subtitle: "Latency Benchmark with varying number of CPU cores involved in the computations (some data might be missing indicating benchmark crashed during runs)". Y axis: Latency (ms), 0-3000; X axis: Sequence Length (20, 32, 128, 384, 512); legend (CPU cores): 1, 4, 16, 40, 80. Bar values in ms per series — 1 core: 198, 228, 516, 1580, 2412; 4 cores: 107, 148, 250, 469, 646; 16 cores: 87, 105, 167, 256, 286; 40 cores: 78, 93, 126, 163, 184; 80 cores: 80, 104, 169, 214, 226.] | 7
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/eager_mode_pytorch_baseline.svg | [SVG bar chart; recoverable labels: title "Intel Ice lake Xeon 8380 - PyTorch, Latency Benchmark with varying number of CPU cores involved in the computations (some data might be missing indicating benchmark crashed during runs)"; X axis "Sequence Length" (20, 32, 128, 384, 512); Y axis "Latency (ms)" (0 to 1800); legend (CPU cores): 1, 4, 16, 40, 80. Bar values in ms by core count: 1 core: 137, 150, 296, 1006, 1623; 4 cores: 36, 39, 77, 282, 388; 16 cores: 17, 19, 33, 95, 126; 40 cores: 11, 12, 19, 56, 83; 80 cores: 14, 16, 25, 50, 77.] | 8
0 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2 | hf_public_repos/blog/assets/35_bert_cpu_scaling_part_2/baselines/eager_mode_intel_tensorflow_baseline.svg | [SVG bar chart; recoverable labels: title "Intel Ice lake Xeon 8380 - Intel TensorFlow, Latency Benchmark with varying number of CPU cores involved in the computations (some data might be missing indicating benchmark crashed during runs)"; X axis "Sequence Length" (20, 32, 128, 384, 512); Y axis "Latency (ms)" (0 to 2500); legend (CPU cores): 1, 4, 16, 40, 80. Bar values in ms by core count: 1 core: 199, 228, 455, 1348, 1950; 4 cores: 103, 117, 194, 387, 514; 16 cores: 82, 88, 134, 206, 240; 40 cores: 76, 83, 114, 149, 164; 80 cores: 79, 88, 121, 147, 164.] | 9
0 | hf_public_repos/audio-transformers-course/chapters/fr | hf_public_repos/audio-transformers-course/chapters/fr/chapter6/tts_datasets.mdx | # Jeux de données de synthèse vocale
La tâche de synthèse vocale comporte une série de défis.
Tout d'abord, tout comme dans la reconnaissance automatique de la parole discutée précédemment, l'alignement entre le texte et la parole peut être délicat.
Cependant, contrairement à l’ASR, le TTS est un problème d’association **un-à-plusieurs**, c'est-à-dire que le même texte peut être synthétisé de différentes manières. Pensez à la diversité des voix et des styles de parole dans le discours que vous entendez quotidiennement : chaque personne a une façon différente de prononcer la même phrase, mais elles sont toutes valides et correctes. Même des sorties différentes (spectrogrammes ou formes d'ondes audio) peuvent correspondre à la même réalité de terrain. Le modèle doit apprendre à générer la durée et le timing corrects pour chaque phonème, mot ou phrase, ce qui peut être difficile, en particulier pour les phrases longues et complexes.
Ensuite, il y a le problème de la dépendance : la langue a un aspect temporel, et comprendre le sens d'une phrase nécessite souvent de tenir compte du contexte des mots environnants. Il est crucial de s'assurer que le modèle de TTS capture et conserve les informations contextuelles sur de longues séquences pour générer un discours cohérent et naturel.
Enfin, l'apprentissage des modèles de TTS nécessite généralement des paires de textes et d'enregistrements vocaux correspondants. En plus de cela, pour s'assurer que le modèle peut générer un discours qui sonne naturel pour différents locuteurs et styles de parole, les données doivent contenir des échantillons de discours divers et représentatifs de plusieurs locuteurs. La collecte de ces données est coûteuse, prend du temps et, pour certaines langues, n'est pas réalisable. Vous vous demandez peut-être : pourquoi ne pas simplement prendre un jeu de données conçu pour l'ASR et l'utiliser pour entraîner un modèle de TTS ? Malheureusement, ces jeux de données ne sont pas la meilleure option. Les caractéristiques qui rendent un jeu de données intéressant pour l'ASR, comme un bruit de fond abondant, sont généralement indésirables pour le TTS. C'est génial de pouvoir transcrire la parole à partir d'un enregistrement bruyant d'une rue par exemple, mais pas tellement si votre assistant vocal vous répond avec des voitures qui klaxonnent et des bruits de travaux en arrière-plan. Néanmoins, certains jeux de données ASR peuvent parfois être utiles pour le *finetuning*, car trouver des jeux de données de TTS de haute qualité, multilingues et multi-locuteurs peut être assez difficile.
Explorons quelques ensembles de données adaptés au TTS que vous pouvez trouver sur le *Hub*.
## LJSpeech
[LJSpeech](https://huggingface.co/datasets/lj_speech) est un jeu de données composé de 13 100 échantillons audio de langue anglaise jumelés à leurs transcriptions correspondantes. Le jeu de données contient l'enregistrement d'un seul locuteur lisant des phrases de 7 livres en anglais. LJSpeech est souvent utilisé comme référence pour évaluer les modèles de TTS en raison de sa haute qualité audio et de son contenu linguistique diversifié.
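À titre d'illustration, voici une esquisse minimale de chargement de LJSpeech avec 🤗 *Datasets* (l'identifiant `lj_speech` est celui du lien ci-dessus ; les noms de colonnes `text` et `audio` correspondent à la version du jeu de données sur le *Hub* au moment de la rédaction) :

```python
from datasets import load_dataset

# charger LJSpeech depuis le Hub (un seul split "train")
ljspeech = load_dataset("lj_speech", split="train")

# inspecter un exemple : audio et transcription appariés
exemple = ljspeech[0]
print(exemple["text"])
print(exemple["audio"]["sampling_rate"])
```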
## Multilingual LibriSpeech
[Multilingual LibriSpeech](https://huggingface.co/datasets/facebook/multilingual_librispeech) est une extension multilingue du jeu de données LibriSpeech, qui est à la base une collection à grande échelle de livres audio lus en anglais. Multilingual LibriSpeech inclut des langues supplémentaires, telles que l'allemand, le néerlandais, l'espagnol, le français, l'italien, le portugais et le polonais.
Il propose des enregistrements audio ainsi que des transcriptions alignées pour chaque langue. Ce jeu de données constitue une ressource précieuse pour développer des systèmes et des techniques de synthèse vocale multilingues.
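Ce corpus étant volumineux, une esquisse possible consiste à le parcourir en *streaming* plutôt que de tout télécharger (le nom de configuration `dutch` est une hypothèse parmi les langues citées ci-dessus, et le nom de la colonne de transcription peut varier selon la version du jeu de données) :

```python
from datasets import load_dataset

# streaming=True évite de télécharger l'intégralité du corpus
mls = load_dataset(
    "facebook/multilingual_librispeech", "dutch", split="train", streaming=True
)

# récupérer un premier exemple à la volée
premier = next(iter(mls))
print(premier["transcript"])
```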
## VCTK (*Voice Cloning Toolkit*)
[VCTK](https://huggingface.co/datasets/vctk) est un jeu de données spécialement conçu pour la recherche et le développement de synthèse vocale.
Il contient des enregistrements audio de 110 anglophones avec divers accents. Chaque locuteur lit environ 400 phrases choisies dans un journal, ainsi qu'un paragraphe d'élicitation utilisé pour la *Speech Accent Archive*.
C’est une ressource précieuse pour l’entraînement de modèles de TTS avec des voix et des accents variés, permettant une synthèse vocale plus naturelle et diversifiée.
## Libri-TTS/ LibriTTS-R
[Libri-TTS/ LibriTTS-R](https://huggingface.co/datasets/cdminix/libritts-r-aligned) est un corpus multi-locuteurs d'environ 585 heures de lecture de l'anglais, préparé par Heiga Zen avec l'aide des membres de l'équipe Google Speech et Google Brain. Le corpus LibriTTS est conçu pour la recherche. Il est dérivé des matériaux originaux (fichiers audio mp3 de LibriVox et fichiers texte du projet Gutenberg) du corpus LibriSpeech. Les principales différences par rapport au corpus LibriSpeech sont énumérées ci-dessous :
* Les fichiers audio sont à une fréquence d'échantillonnage de 24 kHz.
* Le discours est divisé lors des pauses de phrase.
* Les textes originaux et normalisés sont inclus.
* Des informations contextuelles (par exemple, des phrases voisines) peuvent être extraites.
* Les énoncés avec un bruit de fond important sont exclus.
La conception d'un bon jeu de données pour le TTS n'est pas une tâche facile car un tel jeu de données devrait posséder plusieurs caractéristiques clés :
* Enregistrements de haute qualité et diversifiés qui couvrent un large éventail de modèles de parole, d'accents, de langues et d'émotions. Les enregistrements doivent être clairs, exempts de bruit de fond et présenter des caractéristiques naturelles de la parole.
* Chaque enregistrement audio doit être accompagné de sa transcription textuelle correspondante.
* Le jeu de données doit contenir un large éventail de contenus linguistiques, y compris différents types de phrases, d'expressions et de mots. Il devrait couvrir divers sujets, genres et domaines pour garantir la capacité du modèle à gérer différents contextes linguistiques.
La bonne nouvelle est qu'il est peu probable que vous ayez à entraîner un modèle de TTS à partir de zéro. Dans la section suivante, nous examinerons les modèles pré-entraînés disponibles sur le *Hub*. | 0 |
0 | hf_public_repos/audio-transformers-course/chapters/fr | hf_public_repos/audio-transformers-course/chapters/fr/chapter6/introduction.mdx | # Unité 6 : Du texte à la parole
Dans l'unité précédente, vous avez appris à utiliser les *transformers* pour convertir la parole en texte. Maintenant, inversons le scénario et voyons comment vous pouvez transformer un texte d'entrée donné en une sortie audio qui ressemble à de la parole.
La tâche que nous allons étudier dans cette unité s'appelle "*Text-to-speech*" (TTS) ou synthèse vocale. Les modèles capables de gérer cette tâche ont un large éventail d'applications potentielles :
* Applications d'assistance : des outils permettant aux malvoyants d'accéder au contenu numérique par le biais du son.
* Narrations de livres audio : rendre la littérature plus accessible aux personnes qui préfèrent écouter ou qui ont des difficultés à lire.
* Assistants virtuels : les modèles TTS sont un élément fondamental des assistants virtuels tels que Siri, Google Assistant ou Amazon Alexa. Une fois qu'ils ont utilisé un modèle de classification pour repérer le mot déclencheur et un modèle ASR pour traiter la demande, ils peuvent utiliser un modèle de TTS pour répondre à votre demande.
* Divertissement, jeux et apprentissage des langues : donnez une voix à vos personnages, racontez des événements d’un jeu ou aidez les apprenants en leur donnant des exemples de prononciation et d'intonation correctes des mots et des phrases.
Il ne s'agit là que de quelques exemples, et je suis sûr que vous pouvez en imaginer beaucoup d'autres ! Il est important de souligner que les modèles de TTS peuvent être utilisés à des fins malveillantes.
Par exemple, avec suffisamment d'échantillons de voix, il serait potentiellement possible de créer de faux enregistrements audio convaincants, conduisant à l'utilisation non autorisée de la voix d'une personne à des fins frauduleuses ou de manipulation.
Si vous envisagez de collecter des données pour *finetuner* vos propres systèmes, examinez attentivement la question de la protection de la vie privée et du consentement.
Les données vocales doivent être obtenues avec le consentement explicite des personnes, en veillant à ce qu'elles comprennent l'objectif, la portée et les risques potentiels associés à l'utilisation de leur voix dans un système de synthèse vocale. Utilisez la synthèse vocale de manière responsable.
## Ce que vous allez apprendre et construire
Dans cette unité, nous parlerons de :
* [Jeux de données pour entraîner un modèle de synthèse vocale](tts_datasets)
* [Modèles pré-entraînés de synthèse vocale](pre-trained_models)
* [*Finetuning* du modèle SpeechT5 sur une nouvelle langue](fine-tuning)
* [Evaluation des modèles TTS](evaluation) | 1 |
0 | hf_public_repos/audio-transformers-course/chapters/fr | hf_public_repos/audio-transformers-course/chapters/fr/chapter6/fine-tuning.mdx | # Finetuning du SpeechT5
Maintenant que vous êtes familiarisé avec la tâche de synthèse vocale et le fonctionnement interne du SpeechT5 qui a été entraîné sur des données en langue anglaise, voyons comment nous pouvons le *finetuner* à une autre langue.
## Préparation
Assurez-vous de disposer d'un GPU si vous souhaitez reproduire cet exemple. Dans un *notebook*, vous pouvez vérifier avec la commande suivante :
```bash
nvidia-smi
```
<Tip warning={true}>
Dans notre exemple, nous utiliserons environ 40 heures de données d'entraînement. Si vous souhaitez suivre en utilisant la version gratuite de Google Colab, vous devrez réduire la quantité de données d'entraînement à environ 10-15 heures, ainsi que réduire le nombre d'étapes d'entraînement.
</Tip>
Vous aurez également besoin de quelques dépendances supplémentaires :
```bash
pip install transformers datasets soundfile speechbrain accelerate
```
Enfin, n'oubliez pas de vous connecter à votre compte Hugging Face pour pouvoir télécharger et partager votre modèle avec la communauté :
```py
from huggingface_hub import notebook_login
notebook_login()
```
## Le jeu de données
Pour cet exemple, nous prendrons le sous-ensemble en néerlandais (`nl`) du jeu de données [VoxPopuli](https://huggingface.co/datasets/facebook/voxpopuli).
[VoxPopuli](https://huggingface.co/datasets/facebook/voxpopuli) est un corpus de parole multilingue à grande échelle composé de données provenant d'enregistrements d'événements du Parlement européen de 2009 à 2020.
Il contient des données de transcription audio étiquetées pour 15 langues européennes. Nous utiliserons le sous-ensemble néerlandais, mais vous pouvez choisir un autre sous-ensemble.
Il s'agit d'un jeu de données de reconnaissance automatique de la parole, donc comme mentionné précédemment, ce n'est pas l'option la plus appropriée pour entraîner des modèles TTS. Cependant, il sera suffisant pour cet exercice.
Chargeons les données :
```python
from datasets import load_dataset, Audio
dataset = load_dataset("facebook/voxpopuli", "nl", split="train")
len(dataset)
```
**Sortie :**
```out
20968
```
20968 exemples devraient suffire pour un *finetuning*. SpeechT5 s'attend à ce que les données audio aient une fréquence d'échantillonnage de 16 kHz, assurez-vous donc que les exemples du jeu de données répondent à cette exigence :
```python
dataset = dataset.cast_column("audio", Audio(sampling_rate=16000))
```
## Prétraitement des données
Commençons par définir le *checkpoint* du modèle à utiliser et par charger le processeur approprié qui contient à la fois le *tokenizer* et l'extracteur de caractéristiques dont nous aurons besoin pour préparer les données en vue de l'entraînement :
```py
from transformers import SpeechT5Processor
checkpoint = "microsoft/speecht5_tts"
processor = SpeechT5Processor.from_pretrained(checkpoint)
```
### Nettoyage du texte pour la tokenisation de SpeechT5
Tout d'abord, pour préparer le texte, nous aurons besoin de la partie *tokenizer* du processeur, alors allons-y :
```py
tokenizer = processor.tokenizer
```
Prenons un exemple :
```python
dataset[0]
```
**Sortie :**
```out
{'audio_id': '20100210-0900-PLENARY-3-nl_20100210-09:06:43_4',
'language': 9,
'audio': {'path': '/root/.cache/huggingface/datasets/downloads/extracted/02ec6a19d5b97c03e1379250378454dbf3fa2972943504a91c7da5045aa26a89/train_part_0/20100210-0900-PLENARY-3-nl_20100210-09:06:43_4.wav',
'array': array([ 4.27246094e-04, 1.31225586e-03, 1.03759766e-03, ...,
-9.15527344e-05, 7.62939453e-04, -2.44140625e-04]),
'sampling_rate': 16000},
'raw_text': 'Dat kan naar mijn gevoel alleen met een brede meerderheid die wij samen zoeken.',
'normalized_text': 'dat kan naar mijn gevoel alleen met een brede meerderheid die wij samen zoeken.',
'gender': 'female',
'speaker_id': '1122',
'is_gold_transcript': True,
'accent': 'None'}
```
Vous remarquerez que les exemples du jeu de données contiennent les caractéristiques `raw_text` et `normalized_text`. Lorsque vous décidez quelle caractéristique utiliser comme entrée de texte, il est important de savoir que le *tokenizer* de SpeechT5 n'a pas de *tokens* pour les nombres. Dans `normalized_text`, les nombres sont écrits sous forme textuelle. C'est donc la meilleure option : nous utiliserons `normalized_text` comme texte d'entrée.
Comme SpeechT5 a été entraîné sur la langue anglaise, il se peut qu'il ne reconnaisse pas certains caractères dans le jeu de données néerlandais.
S'ils sont laissés tels quels, ces caractères seront convertis en tokens `<unk>`. Cependant, en néerlandais, certains caractères comme `à` sont utilisés pour souligner les syllabes. Afin de préserver le sens du texte, nous pouvons remplacer ce caractère par un `a` normal.
Pour identifier les *tokens* non supportés, extrayez tous les caractères uniques du jeu de données en utilisant le `SpeechT5Tokenizer` qui fonctionne avec les caractères comme des *tokens*.
Pour ce faire, nous allons écrire la fonction `extract_all_chars` qui concatène les transcriptions de tous les exemples en une chaîne et la convertit en un ensemble de caractères.
Assurez-vous de mettre `batched=True` et `batch_size=-1` dans `dataset.map()` afin que toutes les transcriptions soient disponibles en même temps pour la fonction de *mapping*.
```py
def extract_all_chars(batch):
    all_text = " ".join(batch["normalized_text"])
    vocab = list(set(all_text))
    return {"vocab": [vocab], "all_text": [all_text]}

vocabs = dataset.map(
    extract_all_chars,
    batched=True,
    batch_size=-1,
    keep_in_memory=True,
    remove_columns=dataset.column_names,
)
dataset_vocab = set(vocabs["vocab"][0])
tokenizer_vocab = {k for k, _ in tokenizer.get_vocab().items()}
```
Vous disposez à présent de deux ensembles de caractères : l'un avec le vocabulaire du jeu de données et l'autre avec le vocabulaire du *tokenizer*.
Pour identifier les caractères non pris en charge dans le jeu de données, vous pouvez prendre la différence entre ces deux ensembles. L'ensemble résultant contiendra les caractères qui se trouvent dans le jeu de données mais pas dans le *tokenizer*.
```py
dataset_vocab - tokenizer_vocab
```
**Sortie :**
```out
{' ', 'à', 'ç', 'è', 'ë', 'í', 'ï', 'ö', 'ü'}
```
Pour gérer les caractères non supportés identifiés dans l'étape précédente, nous pouvons définir une fonction qui fait correspondre ces caractères à des *tokens* valides. Notez que les espaces sont déjà remplacés par `▁` dans le *tokenizer* et n'ont pas besoin d'être gérés séparément.
```py
replacements = [
    ("à", "a"),
    ("ç", "c"),
    ("è", "e"),
    ("ë", "e"),
    ("í", "i"),
    ("ï", "i"),
    ("ö", "o"),
    ("ü", "u"),
]

def cleanup_text(inputs):
    for src, dst in replacements:
        inputs["normalized_text"] = inputs["normalized_text"].replace(src, dst)
    return inputs
dataset = dataset.map(cleanup_text)
```
Maintenant que nous avons traité les caractères spéciaux dans le texte, il est temps de se concentrer sur les données audio.
### Les locuteurs
Le jeu de données VoxPopuli comprend la parole de plusieurs locuteurs, mais combien de locuteurs sont représentés dans le jeu de données ? Pour le savoir, nous pouvons compter le nombre de locuteurs uniques et le nombre d'exemples que chaque locuteur apporte au jeu de données.
Avec un total de 20 968 exemples dans le jeu de données, cette information nous permettra de mieux comprendre la distribution des locuteurs et des exemples dans les données.
```py
from collections import defaultdict
speaker_counts = defaultdict(int)
for speaker_id in dataset["speaker_id"]:
    speaker_counts[speaker_id] += 1
```
En traçant un histogramme, vous pouvez vous faire une idée de la quantité de données pour chaque locuteur.
```py
import matplotlib.pyplot as plt
plt.figure()
plt.hist(speaker_counts.values(), bins=20)
plt.ylabel("Speakers")
plt.xlabel("Examples")
plt.show()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/tts_speakers_histogram.png" alt="Speakers histogram"/>
</div>
L'histogramme révèle qu'environ un tiers des locuteurs du jeu de données ont moins de 100 exemples, tandis qu'une dizaine de locuteurs ont plus de 500 exemples. Pour améliorer l'efficacité de l'entraînement et équilibrer le jeu de données, nous pouvons limiter les données aux locuteurs ayant entre 100 et 400 exemples.
```py
def select_speaker(speaker_id):
    return 100 <= speaker_counts[speaker_id] <= 400
dataset = dataset.filter(select_speaker, input_columns=["speaker_id"])
```
Vérifions le nombre de locuteurs restants :
```py
len(set(dataset["speaker_id"]))
```
**Sortie :**
```out
42
```
Voyons combien d'exemples il reste :
```py
len(dataset)
```
**Sortie :**
```out
9973
```
Vous vous retrouvez avec un peu moins de 10 000 exemples provenant d'environ 40 locuteurs uniques, ce qui devrait être suffisant.
Notez que certains locuteurs ayant peu d'exemples peuvent en fait avoir plus d'audio disponible si les exemples sont longs. Cependant, pour déterminer la quantité totale d'audio pour chaque locuteur, il faut parcourir l'ensemble des données, ce qui est un processus long qui implique le chargement et le décodage de chaque fichier audio. C'est pourquoi nous avons choisi de sauter cette étape.
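Si vous teniez malgré tout à calculer la durée totale d'audio par locuteur, en voici une esquisse possible, lente précisément parce qu'elle décode chaque fichier audio (les colonnes `audio` et `speaker_id` sont celles vues plus haut) :

```python
from collections import defaultdict

duree_par_locuteur = defaultdict(float)

# attention : chaque itération charge et décode un fichier audio complet
for exemple in dataset:
    audio = exemple["audio"]
    duree = len(audio["array"]) / audio["sampling_rate"]  # durée en secondes
    duree_par_locuteur[exemple["speaker_id"]] += duree
```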
### Enchâssement des locuteurs
Pour permettre au modèle TTS de différencier plusieurs locuteurs, vous devrez créer un enchâssement de locuteur pour chaque exemple.
Il s'agit d'une entrée supplémentaire du modèle qui capture les caractéristiques de la voix d'un locuteur particulier.
Pour générer ces enchâssements, utilisez le modèle pré-entraîné [spkrec-xvect-voxceleb](https://huggingface.co/speechbrain/spkrec-xvect-voxceleb) de SpeechBrain.
Créez une fonction `create_speaker_embedding()` qui prend une forme d'onde audio en entrée et produit un vecteur de 512 éléments contenant l'enchâssement du locuteur correspondant.
```py
import os
import torch
from speechbrain.pretrained import EncoderClassifier
spk_model_name = "speechbrain/spkrec-xvect-voxceleb"
device = "cuda" if torch.cuda.is_available() else "cpu"
speaker_model = EncoderClassifier.from_hparams(
    source=spk_model_name,
    run_opts={"device": device},
    savedir=os.path.join("/tmp", spk_model_name),
)

def create_speaker_embedding(waveform):
    with torch.no_grad():
        speaker_embeddings = speaker_model.encode_batch(torch.tensor(waveform))
        speaker_embeddings = torch.nn.functional.normalize(speaker_embeddings, dim=2)
        speaker_embeddings = speaker_embeddings.squeeze().cpu().numpy()
    return speaker_embeddings
```
Il est important de noter que le modèle `speechbrain/spkrec-xvect-voxceleb` a été entraîné sur de l'anglais provenant du jeu de données VoxCeleb, alors que les exemples d'entraînement dans ce guide sont en néerlandais. Bien que nous pensions que ce modèle génèrerait toujours des enchâssements de locuteurs raisonnables pour notre jeu de données néerlandais, cette hypothèse peut ne pas être vraie dans tous les cas.
Pour obtenir des résultats optimaux, nous devrions d'abord entraîner un modèle X-vector sur la parole cible. Ainsi, le modèle sera mieux à même de capturer les caractéristiques vocales propres à la langue néerlandaise. Si vous souhaitez entraîner votre propre modèle X-vector, vous pouvez utiliser [ce script](https://huggingface.co/mechanicalsea/speecht5-vc/blob/main/manifest/utils/prep_cmu_arctic_spkemb.py) comme exemple.
### Traitement du jeu de données
Enfin, traitons les données dans le format attendu par le modèle. Créez une fonction `prepare_dataset` qui prend un seul exemple et utilise l'objet `SpeechT5Processor` pour tokeniser le texte d'entrée et charger l'audio cible dans un spectrogramme log-mel.
Elle devrait également ajouter les enchâssements de locuteur en tant qu'entrée supplémentaire.
```py
def prepare_dataset(example):
    audio = example["audio"]

    example = processor(
        text=example["normalized_text"],
        audio_target=audio["array"],
        sampling_rate=audio["sampling_rate"],
        return_attention_mask=False,
    )

    # supprimer la dimension de batch
    example["labels"] = example["labels"][0]

    # utiliser SpeechBrain pour obtenir le x-vector
    example["speaker_embeddings"] = create_speaker_embedding(audio["array"])

    return example
```
Vérifiez que le traitement est correct en examinant un seul exemple :
```py
processed_example = prepare_dataset(dataset[0])
list(processed_example.keys())
```
**Sortie :**
```out
['input_ids', 'labels', 'stop_labels', 'speaker_embeddings']
```
Les enchâssements de locuteurs doivent être un vecteur de 512 éléments :
```py
processed_example["speaker_embeddings"].shape
```
**Sortie :**
```out
(512,)
```
Les étiquettes doivent être un spectrogramme log-mel avec 80 bins mel.
```py
import matplotlib.pyplot as plt
plt.figure()
plt.imshow(processed_example["labels"].T)
plt.show()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/tts_logmelspectrogram_1.png" alt="Log-mel spectrogram with 80 mel bins"/>
</div>
Remarque : si vous trouvez ce spectrogramme déroutant, c'est peut-être parce que vous êtes familier avec la convention qui consiste à placer les basses fréquences en bas et les hautes fréquences en haut d'un graphique.
Cependant, lorsque l'on trace des spectrogrammes sous forme d'image à l'aide de la bibliothèque matplotlib, l'axe des ordonnées est inversé et les spectrogrammes apparaissent à l'envers.
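Si vous préférez la convention habituelle, vous pouvez par exemple passer `origin="lower"` à `imshow` de matplotlib pour que les basses fréquences apparaissent en bas :

```python
plt.figure()
# origin="lower" place la première ligne (basses fréquences) en bas de l'image
plt.imshow(processed_example["labels"].T, origin="lower")
plt.show()
```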
Nous devons maintenant appliquer la fonction de traitement à l'ensemble du jeu de données. Cela prendra entre 5 et 10 minutes.
```py
dataset = dataset.map(prepare_dataset, remove_columns=dataset.column_names)
```
Vous verrez un avertissement indiquant que certains exemples du jeu de données sont plus longs que la longueur d'entrée maximale que le modèle peut traiter (600 *tokens*).
Supprimez ces exemples du jeu de données. Ici, nous allons encore plus loin et, pour tenir compte de la taille des batchs, nous supprimons tout ce qui dépasse 200 *tokens*.
```py
def is_not_too_long(input_ids):
    input_length = len(input_ids)
    return input_length < 200
dataset = dataset.filter(is_not_too_long, input_columns=["input_ids"])
len(dataset)
```
**Sortie :**
```out
8259
```
Ensuite, créez une répartition simple entraînement/test :
```py
dataset = dataset.train_test_split(test_size=0.1)
```
### Assembleur de données
Afin de combiner plusieurs exemples dans un batch, vous devez définir un assembleur de données personnalisé. Il complétera les séquences les plus courtes avec des *tokens* de rembourrage, afin que tous les exemples aient la même longueur. Pour les étiquettes des spectrogrammes, les parties rembourrées sont remplacées par la valeur spéciale `-100`. Cette valeur spéciale indique au modèle d'ignorer cette partie du spectrogramme lors du calcul de la perte.
```py
from dataclasses import dataclass
from typing import Any, Dict, List, Union
@dataclass
class TTSDataCollatorWithPadding:
    processor: Any

    def __call__(
        self, features: List[Dict[str, Union[List[int], torch.Tensor]]]
    ) -> Dict[str, torch.Tensor]:
        input_ids = [{"input_ids": feature["input_ids"]} for feature in features]
        label_features = [{"input_values": feature["labels"]} for feature in features]
        speaker_features = [feature["speaker_embeddings"] for feature in features]

        # assembler les données d'entrée et les cibles dans un batch
        batch = self.processor.pad(
            input_ids=input_ids, labels=label_features, return_tensors="pt"
        )

        # remplacer le rembourrage par -100 pour l'ignorer dans le calcul de la perte
        batch["labels"] = batch["labels"].masked_fill(
            batch.decoder_attention_mask.unsqueeze(-1).ne(1), -100
        )

        # non utilisé pendant le finetuning
        del batch["decoder_attention_mask"]

        # arrondir les longueurs cibles au multiple du facteur de réduction
        if model.config.reduction_factor > 1:
            target_lengths = torch.tensor(
                [len(feature["input_values"]) for feature in label_features]
            )
            target_lengths = target_lengths.new(
                [
                    length - length % model.config.reduction_factor
                    for length in target_lengths
                ]
            )
            max_length = max(target_lengths)
            batch["labels"] = batch["labels"][:, :max_length]

        # ajouter également les enchâssements de locuteur
        batch["speaker_embeddings"] = torch.tensor(speaker_features)

        return batch
```
Dans SpeechT5, l'entrée de la partie décodeur du modèle est réduite d'un facteur 2. En d'autres termes, il élimine un pas de temps sur deux de la séquence cible. Le décodeur prédit alors une séquence deux fois plus longue. Étant donné que la longueur de la séquence cible d'origine peut être impaire, l'assembleur de données veille à arrondir la longueur maximale du batch à un multiple de 2.
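Pour illustrer cet arrondi, voici un petit exemple chiffré reprenant la formule `length - length % reduction_factor` utilisée dans l'assembleur ci-dessus :

```python
reduction_factor = 2

for length in [99, 100, 101]:
    # arrondir vers le bas au multiple de reduction_factor
    print(length, "->", length - length % reduction_factor)
# 99 -> 98, 100 -> 100, 101 -> 100 : toujours un multiple de 2
```

Nous pouvons maintenant instancier l'assembleur :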
```py
data_collator = TTSDataCollatorWithPadding(processor=processor)
```
## Entraîner le modèle
Chargez le modèle pré-entraîné à partir du même *checkpoint* que celui utilisé pour le chargement du processeur :
```py
from transformers import SpeechT5ForTextToSpeech
model = SpeechT5ForTextToSpeech.from_pretrained(checkpoint)
```
L'option `use_cache=True` est incompatible avec le *checkpointing* du gradient. Désactivez le cache pour l'entraînement, et réactivez-le pour la génération afin d'accélérer l'inférence :
```py
from functools import partial
# désactiver le cache pendant l'entraînement car il est incompatible avec le checkpointing du gradient
model.config.use_cache = False
# réactiver le cache pour accélérer la génération
model.generate = partial(model.generate, use_cache=True)
```
Définissez les arguments d'entraînement. Ici, nous ne calculons aucune métrique d'évaluation pendant l'entraînement ; nous parlerons d'évaluation plus loin dans ce chapitre. À la place, nous ne suivrons que la perte :
```python
from transformers import Seq2SeqTrainingArguments
training_args = Seq2SeqTrainingArguments(
    output_dir="speecht5_finetuned_voxpopuli_nl",  # changez pour un nom de dépôt de votre choix
    per_device_train_batch_size=4,
    gradient_accumulation_steps=8,
    learning_rate=1e-5,
    warmup_steps=500,
    max_steps=4000,
    gradient_checkpointing=True,
    fp16=True,
    evaluation_strategy="steps",
    per_device_eval_batch_size=2,
    save_steps=1000,
    eval_steps=1000,
    logging_steps=25,
    report_to=["tensorboard"],
    load_best_model_at_end=True,
    greater_is_better=False,
    label_names=["labels"],
    push_to_hub=True,
)
```
Instanciez l'objet `Trainer` et passez-lui le modèle, le jeu de données et l'assembleur de données.
```py
from transformers import Seq2SeqTrainer
trainer = Seq2SeqTrainer(
    args=training_args,
    model=model,
    train_dataset=dataset["train"],
    eval_dataset=dataset["test"],
    data_collator=data_collator,
    tokenizer=processor,
)
```
Et voilà, nous sommes prêts à commencer l'entraînement ! Il prendra plusieurs heures. En fonction de votre GPU, il est possible que vous rencontriez une erreur CUDA « out-of-memory » au début de l'entraînement. Dans ce cas, réduisez `per_device_train_batch_size` d'un facteur 2 et multipliez `gradient_accumulation_steps` par 2 pour compenser.
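À titre indicatif, diviser la taille de batch par 2 tout en multipliant l'accumulation de gradient par 2 conserve la même taille de batch effective :

```python
# taille de batch effective = per_device_train_batch_size * gradient_accumulation_steps
print(4 * 8)   # configuration d'origine : 32
print(2 * 16)  # variante plus économe en mémoire : 32 (identique)
```

Lancez ensuite l'entraînement :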
```py
trainer.train()
```
Pousser le modèle final sur le *Hub* :
```py
trainer.push_to_hub()
```
## Inférence
Une fois que vous avez *finetuné* un modèle, vous pouvez l'utiliser pour l'inférence ! Chargez le modèle à partir du *Hub* (assurez-vous d'utiliser votre nom de compte dans l'extrait de code suivant) :
```py
model = SpeechT5ForTextToSpeech.from_pretrained(
    "YOUR_ACCOUNT/speecht5_finetuned_voxpopuli_nl"
)
```
Choisissez un exemple, ici nous en prendrons un du jeu de données de test. Obtenez un enchâssement du locuteur.
```py
example = dataset["test"][304]
speaker_embeddings = torch.tensor(example["speaker_embeddings"]).unsqueeze(0)
```
Définissez un texte d'entrée :
```py
text = "hallo allemaal, ik praat nederlands. groetjes aan iedereen!"
```
Prétraiter le texte d'entrée :
```py
inputs = processor(text=text, return_tensors="pt")
```
Instanciez un vocodeur et générez de la parole :
```py
from transformers import SpeechT5HifiGan
vocoder = SpeechT5HifiGan.from_pretrained("microsoft/speecht5_hifigan")
speech = model.generate_speech(inputs["input_ids"], speaker_embeddings, vocoder=vocoder)
```
On écoute le résultat ?
```py
from IPython.display import Audio
Audio(speech.numpy(), rate=16000)
```
Il peut être difficile d'obtenir des résultats satisfaisants avec ce modèle dans une nouvelle langue. La qualité de l'enchâssement du locuteur peut être un facteur important. Comme SpeechT5 a été entraîné avec des X-vectors anglais, il donne de meilleurs résultats lorsqu'il utilise des enchâssements de locuteurs anglais. Si la synthèse vocale semble médiocre, essayez d'utiliser un autre enchâssement de locuteur.
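Par exemple, vous pouvez essayer des X-vectors pré-calculés sur des locuteurs anglais du corpus CMU ARCTIC, comme le fait la documentation de SpeechT5 (l'indice `7306` correspond à un locuteur particulier, n'hésitez pas à en essayer d'autres) :

```python
from datasets import load_dataset

# X-vectors pré-calculés sur des locuteurs anglais (corpus CMU ARCTIC)
embeddings_dataset = load_dataset("Matthijs/cmu-arctic-xvectors", split="validation")
speaker_embeddings = torch.tensor(embeddings_dataset[7306]["xvector"]).unsqueeze(0)
```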
L'augmentation de la durée d'entraînement est également susceptible d'améliorer la qualité des résultats. Malgré cela, le discours est clairement néerlandais et non anglais, et il capture les caractéristiques vocales du locuteur (comparez avec l'audio original dans l'exemple).
Une autre chose à expérimenter est la configuration du modèle. Par exemple, essayez d'utiliser `config.reduction_factor = 1` pour voir si cela améliore les résultats.
Dans la section suivante, nous verrons comment nous évaluons les modèles de synthèse vocale.
| 2 |
0 | hf_public_repos/audio-transformers-course/chapters/fr | hf_public_repos/audio-transformers-course/chapters/fr/chapter6/hands_on.mdx | # Exercice pratique
Dans cette unité, nous avons exploré la tâche audio de synthèse vocale, parlé des jeux de données existants, des modèles pré-entraînés et des nuances du *finetuning* de SpeechT5 pour une nouvelle langue.
Comme vous l'avez vu, le *finetuning* des modèles pour la tâche de synthèse vocale peut s'avérer difficile dans des scénarios à faibles ressources. Parallèlement, l'évaluation des modèles de synthèse vocale n'est pas facile non plus.
Pour ces raisons, cet exercice pratique se concentrera sur la mise en pratique des compétences plutôt que sur l'obtention d'une certaine valeur de métrique.
Votre objectif pour cette tâche est de *finetuner* SpeechT5 sur un jeu de données de votre choix. Vous pouvez choisir une autre langue dans le même jeu de données `voxpopuli`, ou tout autre jeu de données listé dans cette unité.
Faites attention à la taille des données d'entraînement ! Pour s'entraîner sur un GPU gratuit de Google Colab, nous recommandons de limiter les données d'entraînement à environ 10-15 heures.
Une fois le *finetuning* terminé, partagez votre modèle en le téléversant sur le *Hub*. Assurez-vous de marquer votre modèle comme un modèle `text-to-speech`, soit avec les `kwargs` appropriés, soit via l'interface utilisateur du *Hub*.
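À titre indicatif, voici une façon de passer des `kwargs` lors du téléversement pour que le modèle soit correctement étiqueté (les valeurs ci-dessous sont des hypothèses à adapter à votre propre entraînement) :

```python
kwargs = {
    "dataset_tags": "facebook/voxpopuli",  # hypothèse : le jeu de données utilisé
    "tasks": "text-to-speech",
    "finetuned_from": "microsoft/speecht5_tts",
}
trainer.push_to_hub(**kwargs)
```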
Rappelez-vous : le but principal de cet exercice est de vous faire pratiquer, afin d'affiner vos compétences et d'acquérir une compréhension plus profonde des tâches de synthèse vocale.
0 | hf_public_repos/audio-transformers-course/chapters | hf_public_repos/audio-transformers-course/chapters/en/_toctree.yml | - title: Unit 0. Welcome to the course!
sections:
- local: chapter0/introduction
title: What to expect from the course
- local: chapter0/get_ready
title: Get ready
- local: chapter0/community
title: Join the community
- title: Unit 1. Working with audio data
sections:
- local: chapter1/introduction
title: What you'll learn
- local: chapter1/audio_data
title: Introduction to audio data
- local: chapter1/load_and_explore
title: Load and explore an audio dataset
- local: chapter1/preprocessing
title: Preprocessing audio data
- local: chapter1/streaming
title: Streaming audio data
- local: chapter1/quiz
title: Quiz
quiz: 1
- local: chapter1/supplemental_reading
title: Supplemental reading and resources
- title: Unit 2. A gentle introduction to audio applications
sections:
- local: chapter2/introduction
title: Tour of audio applications
- local: chapter2/audio_classification_pipeline
title: Audio classification with a pipeline
- local: chapter2/asr_pipeline
title: Automatic speech recognition with a pipeline
- local: chapter2/tts_pipeline
title: Audio generation with a pipeline
- local: chapter2/hands_on
title: Hands-on exercise
- title: Unit 3. Transformer architectures for audio
sections:
- local: chapter3/introduction
title: Refresher on transformer models
- local: chapter3/ctc
title: CTC architectures
- local: chapter3/seq2seq
title: Seq2Seq architectures
- local: chapter3/classification
title: Audio classification architectures
- local: chapter3/quiz
title: Quiz
quiz: 3
- local: chapter3/supplemental_reading
title: Supplemental reading and resources
- title: Unit 4. Build a music genre classifier
sections:
- local: chapter4/introduction
title: What you'll learn and what you'll build
- local: chapter4/classification_models
title: Pre-trained models for audio classification
- local: chapter4/fine-tuning
title: Fine-tuning a model for music classification
- local: chapter4/demo
title: Build a demo with Gradio
- local: chapter4/hands_on
title: Hands-on exercise
- title: Unit 5. Automatic Speech Recognition
sections:
- local: chapter5/introduction
title: What you'll learn and what you'll build
- local: chapter5/asr_models
title: Pre-trained models for speech recognition
- local: chapter5/choosing_dataset
title: Choosing a dataset
- local: chapter5/evaluation
title: Evaluation and metrics for speech recognition
- local: chapter5/fine-tuning
title: How to fine-tune an ASR system with the Trainer API
- local: chapter5/demo
title: Building a demo
- local: chapter5/hands_on
title: Hands-on exercise
- local: chapter5/supplemental_reading
title: Supplemental reading and resources
- title: Unit 6. From text to speech
sections:
- local: chapter6/introduction
title: What you'll learn and what you'll build
- local: chapter6/tts_datasets
title: Text-to-speech datasets
- local: chapter6/pre-trained_models
title: Pre-trained models for text-to-speech
- local: chapter6/fine-tuning
title: Fine-tuning SpeechT5
- local: chapter6/evaluation
title: Evaluating text-to-speech models
- local: chapter6/hands_on
title: Hands-on exercise
- local: chapter6/supplemental_reading
title: Supplemental reading and resources
- title: Unit 7. Putting it all together
sections:
- local: chapter7/introduction
title: What you'll learn and what you'll build
- local: chapter7/speech-to-speech
title: Speech-to-speech translation
- local: chapter7/voice-assistant
title: Creating a voice assistant
- local: chapter7/transcribe-meeting
title: Transcribe a meeting
- local: chapter7/hands_on
title: Hands-on exercise
- local: chapter7/supplemental_reading
title: Supplemental reading and resources
- title: Unit 8. Finish line
sections:
- local: chapter8/introduction
title: Congratulations!
- local: chapter8/certification
title: Get your certificate of completion
- title: Course Events
sections:
- local: events/introduction
title: Live sessions and workshops | 4 |
0 | hf_public_repos/audio-transformers-course/chapters/en | hf_public_repos/audio-transformers-course/chapters/en/chapter0/get_ready.mdx | # Get ready to take the course
We hope that you are excited to get started with the course, and we have designed this page to make sure you have
everything you need to jump right in!
## Step 1. Sign up
To stay up to date with all the updates and special social events, sign up to the course.
[👉 SIGN UP](http://eepurl.com/insvcI)
## Step 2. Get a Hugging Face account
If you don't yet have one, create a Hugging Face account (it's free). You'll need it to complete hands-on tasks, to
receive your certificate of completion, to explore pre-trained models, to access datasets and more.
[👉 CREATE HUGGING FACE ACCOUNT](https://huggingface.co/join)
## Step 3. Brush up on fundamentals (if you need to)
We assume that you are familiar with deep learning basics, and have general familiarity with transformers. If you need
to brush up on your understanding of transformers, check out our [NLP Course](https://huggingface.co/course/chapter1/1).
## Step 4. Check your setup
To go through the course materials you will need:
- A computer with an internet connection
- [Google Colab](https://colab.research.google.com) for hands-on exercises. The free version is enough. If you have never used Google Colab before, check out this [official introduction notebook](https://colab.research.google.com/notebooks/intro.ipynb).
<Tip>
As an alternative to the free tier of Google Colab, you can use your own local setup, or Kaggle Notebooks. Kaggle Notebooks
offer a fixed number of GPU hours and have similar functionality to Google Colab, however, there are differences when it
comes to sharing your models on 🤗 Hub (e.g. for completing assignments). If you decide to use Kaggle Notebooks as your
tool of choice, check out the [example Kaggle notebook](https://www.kaggle.com/code/michaelshekasta/test-notebook) created by
[@michaelshekasta](https://github.com/michaelshekasta). This notebook illustrates how you can train and share your
trained model on 🤗 Hub.
</Tip>
## Step 5. Join the community
Sign up to our Discord server, the place where you can exchange ideas with your classmates and reach out to us (the Hugging Face team).
[👉 JOIN THE COMMUNITY ON DISCORD](http://hf.co/join/discord)
To learn more about our community on Discord and how to make the most of it, check out the [next page](community).
| 5 |
0 | hf_public_repos/audio-transformers-course/chapters/en | hf_public_repos/audio-transformers-course/chapters/en/chapter0/introduction.mdx | # Welcome to the Hugging Face Audio course!
Dear learner,
Welcome to this course on using transformers for audio. Time and again transformers have proven themselves as one of the
most powerful and versatile deep learning architectures, capable of achieving state-of-the-art results in a wide range of
tasks, including natural language processing, computer vision, and more recently, audio processing.
In this course, we will explore how transformers can be applied to audio data. You'll learn how to use them to tackle a
range of audio-related tasks. Whether you are interested in speech recognition, audio classification, or generating speech
from text, transformers and this course have got you covered.
To give you a taste of what these models can do, say a few words in the demo below and watch the model transcribe it in real-time!
<iframe
src="https://openai-whisper.hf.space"
frameborder="0"
width="850"
height="450">
</iframe>
Throughout the course, you will gain an understanding of the specifics of working with audio data, you'll learn about different
transformer architectures, and you'll train your own audio transformers leveraging powerful pre-trained models.
This course is designed for learners with a background in deep learning, and general familiarity with transformers.
No expertise in audio data processing is required. If you need to brush up on your understanding of transformers, check out
our [NLP Course](https://huggingface.co/course/chapter1/1) that goes into much detail on the transformer basics.
## Meet the course team
**Sanchit Gandhi, Machine Learning Research Engineer at Hugging Face**
Hi! I'm Sanchit and I'm a machine learning research engineer for audio in the open-source team at Hugging Face 🤗.
My primary focus is automatic speech recognition and translation, with the current goal of making speech models faster,
lighter and easier to use.
**Matthijs Hollemans, Machine Learning Engineer at Hugging Face**
I'm Matthijs, and I'm a machine learning engineer for audio in the open source team at Hugging Face. I'm also the author
of a book on how to write sound synthesizers, and I create audio plug-ins in my spare time.
**Maria Khalusova, Documentation & Courses at Hugging Face**
I'm Maria, and I create educational content and documentation to make Transformers and other open-source tools even more
accessible. I break down complex technical concepts and help folks get started with cutting-edge technologies.
**Vaibhav Srivastav, ML Developer Advocate Engineer at Hugging Face**
I'm Vaibhav (VB) and I'm a Developer Advocate Engineer for Audio in the Open Source team at Hugging Face. I research
low-resource Text to Speech and help bring SoTA speech research to the masses.
## Course structure
The course is structured into several units that cover various topics in depth:
* [Unit 1](https://huggingface.co/learn/audio-course/chapter1): learn about the specifics of working with audio data, including audio processing techniques and data preparation.
* [Unit 2](https://huggingface.co/learn/audio-course/chapter2): get to know audio applications and learn how to use 🤗 Transformers pipelines for different tasks, such as
audio classification and speech recognition.
* [Unit 3](https://huggingface.co/learn/audio-course/chapter3): explore audio transformer architectures, learn how they differ, and what tasks they are best suited for.
* [Unit 4](https://huggingface.co/learn/audio-course/chapter4): learn how to build your own music genre classifier.
* [Unit 5](https://huggingface.co/learn/audio-course/chapter5): delve into speech recognition and build a model to transcribe meeting recordings.
* [Unit 6](https://huggingface.co/learn/audio-course/chapter6): learn how to generate speech from text.
* [Unit 7](https://huggingface.co/learn/audio-course/chapter7): learn how to build real-world audio applications with transformers.
Each unit includes a theoretical component, where you will gain a deep understanding of the underlying concepts and
techniques. Throughout the course, we provide quizzes to help you test your knowledge and reinforce your learning.
Some chapters also include hands-on exercises, where you will have the opportunity to apply what you have learned.
By the end of the course, you will have a strong foundation in using transformers for audio data and will be
well-equipped to apply these techniques to a wide range of audio-related tasks.
The course units will be released in several consecutive blocks with the following publishing schedule:
| Units | Publishing date |
|---|-----------------|
| Unit 0, Unit 1, and Unit 2 | June 14, 2023 |
| Unit 3, Unit 4 | June 21, 2023 |
| Unit 5 | June 28, 2023 |
| Unit 6 | July 5, 2023 |
| Unit 7, Unit 8 | July 12, 2023 |
## Learning paths and certification
There is no right or wrong way to take this course. All the materials in this course are 100% free, public and open-source.
You can take the course at your own pace, however, we recommend going through the units in their order.
If you'd like to get certified upon the course completion, we offer two options:
| Certificate type | Requirements |
|---|--------------------------------------------------------------------|
| Certificate of completion | Complete 80% of the hands-on exercises according to instructions. |
| Certificate of honors | Complete 100% of the hands-on exercises according to instructions. |
Each hands-on exercise outlines its completion criteria. Once you have completed enough hands-on exercises to qualify
for either of the certificates, refer to the last unit of the course to learn how you can get your certificate. Good luck!
## Sign up to the course
The units of this course will be released gradually over the course of a few weeks. We encourage you to sign up to the
course updates so that you don't miss new units when they are released. Learners who sign up to the course updates will
also be the first ones to learn about special social events that we plan to host.
[SIGN UP](http://eepurl.com/insvcI)
Enjoy the course!
| 6 |
0 | hf_public_repos/audio-transformers-course/chapters/en | hf_public_repos/audio-transformers-course/chapters/en/chapter0/community.mdx | # Join the community!
We invite you to [join our vibrant and supportive community on Discord](http://hf.co/join/discord). You will have the opportunity to connect with
like-minded learners, exchange ideas, and get valuable feedback on your hands-on exercises. You can ask questions,
share resources, and collaborate with others.
Our team is also active on Discord, and they are available to provide support and guidance when you need
it. Joining our community is an excellent way to stay motivated, engaged, and connected, and we look forward to seeing
you there!
## What is Discord?
Discord is a free chat platform. If you've used Slack, you'll find it quite similar. The Hugging Face Discord server
is home to a thriving community of 18,000 AI experts, learners and enthusiasts that you can be a part of.
## Navigating Discord
Once you sign up to our Discord server, you'll need to pick the topics you're interested in by clicking `#role-assignment`
at the left. You can choose as many different categories as you like. To join other learners of this course, make sure
to click "ML for Audio and Speech".
Explore the channels and share a few things about you in the `#introduce-yourself` channel.
## Audio course channels
There are many channels focused on various topics on our Discord server. You'll find people discussing papers, organizing
events, sharing their projects and ideas, brainstorming, and so much more.
As an audio course learner, you may find the following set of channels particularly relevant:
* `#audio-announcements`: updates about the course, news from the Hugging Face related to everything audio, event announcements, and more.
* `#audio-study-group`: a place to exchange ideas, ask questions about the course and start discussions.
* `#audio-discuss`: a general place to have discussions about things related to audio.
In addition to joining the `#audio-study-group`, feel free to create your own study group, learning together is always easier!
| 7 |
0 | hf_public_repos/audio-transformers-course/chapters/en | hf_public_repos/audio-transformers-course/chapters/en/events/introduction.mdx | # Live sessions and workshops
New Audio Transformers Course: Live Launch Event with Paige Bailey (DeepMind), Seokhwan Kim (Amazon Alexa AI), and Brian McFee (Librosa)
<Youtube id="wqkKResXWB8"/>
The recording of a Live AMA with the Hugging Face Audio course team:
<Youtube id="fbONSVoUneQ"/>
| 8 |
0 | hf_public_repos/audio-transformers-course/chapters/en | hf_public_repos/audio-transformers-course/chapters/en/chapter8/certification.mdx | # Get your certificate of completion
The certification process is completely free.
* To get a certificate of completion: you need to pass 3 out of 4 hands-on assignments.
* To get a certificate of excellence: you need to pass 4 out of 4 hands-on assignments.
The requirements for each assignment are listed in the respective units:
* [Unit 4 Hands-on](../chapter4/hands_on)
* [Unit 5 Hands-on](../chapter5/hands_on)
* [Unit 6 Hands-on](../chapter6/hands_on)
* [Unit 7 Hands-on](../chapter7/hands_on)
For the assignments that require training a model, make sure to push a model that meets the requirements to the Hub with the relevant `kwargs`.
For the demo assignment in Unit 7, make sure that your demo is `public`.
For self-evaluation and to see what units you passed/not passed, you can use the following space:
[Check My Progress - Audio Course](https://huggingface.co/spaces/MariaK/Check-my-progress-Audio-Course)
Once you qualify for a certificate, go to the [Audio Course Certification](https://huggingface.co/spaces/MariaK/Audio-Course-Certification) space.
This space implements additional checks to ensure your submissions meet the assessment criteria.
Type your Hugging Face username, first name, and last name in the text fields and click on the "Check if I pass and get the certificate" button.
If you passed 3 out of 4 hands-on assignments, you will receive the certificate of completion.
If you passed 4 out of 4 hands-on assignments, you will receive the certificate of excellence.
You can download your certificate in pdf format and png format. Don't hesitate to share your certificate on Twitter
(tag me @mariakhalusova and @huggingface) and on LinkedIn.
If you do not meet the certification criteria, don't be discouraged! Go back to the
[Check My Progress - Audio Course](https://huggingface.co/spaces/MariaK/Check-my-progress-Audio-Course) space to see which
units you need to do again to get your certificate. If you are experiencing any issue with either of the spaces,
let us know!
| 9 |
0 | hf_public_repos/audio-transformers-course/chapters/bn | hf_public_repos/audio-transformers-course/chapters/bn/chapter1/quiz.mdx | <!-- DISABLE-FRONTMATTER-SECTIONS -->
# কোর্সের উপাদান সম্পর্কে আপনার উপলব্ধি পরীক্ষা করুন
### ১. Sampling Rate কোন এককে পরিমাপ করা হয়?
<Question
choices={[
{
text: "dB",
explain: "না, amplitude ডেসিবেলে (dB) পরিমাপ করা হয়।"
},
{
text: "Hz",
explain: "Sampling Rate হল এক সেকেন্ডে নেওয়া নমুনার সংখ্যা এবং হার্টজ (Hz) এ পরিমাপ করা হয়।",
correct: true
},
{
text: "bit",
explain: "bit, bit depth বর্ণনা করতে ব্যবহৃত হয়, যা একটি অডিও সংকেতের প্রতিটি নমুনাকে উপস্থাপন করতে ব্যবহৃত তথ্যের বিটের সংখ্যা বোঝায়।",
}
]}
/>
### ২. একটি বড় অডিও ডেটাসেট স্ট্রিম করার সময়, আপনি কত তাড়াতাড়ি এটি ব্যবহার করা শুরু করতে পারেন?
<Question
choices={[
{
text: "সম্পূর্ণ ডেটাসেট ডাউনলোড হওয়ার সাথে সাথে।",
explain: "ডেটা স্ট্রিমিংয়ের লক্ষ্য হল একটি ডেটাসেট সম্পূর্ণরূপে ডাউনলোড না করেই এটির সাথে কাজ করতে সক্ষম হওয়া।"
},
{
text: "যত তাড়াতাড়ি প্রথম ১৬টি উদাহরণ ডাউনলোড করা হয়।",
explain: "আবার চেষ্টা করুন!"
},
{
text: "যত তাড়াতাড়ি প্রথম উদাহরণ ডাউনলোড করা হয়।",
explain: "",
correct: true
}
]}
/>
### ৩. spectrogram কি ?
<Question
choices={[
{
text: "অডিও ডিজিটাইজ করতে ব্যবহৃত একটি ডিভাইস যা প্রথমে একটি মাইক্রোফোন দ্বারা ক্যাপচার করা হয়, যা শব্দ তরঙ্গকে বৈদ্যুতিক সংকেতে রূপান্তরিত করে।",
explain: "এই ধরনের বৈদ্যুতিক সংকেত ডিজিটাইজ করার জন্য ব্যবহৃত একটি ডিভাইসকে বলা হয় অ্যানালগ-টু-ডিজিটাল কনভার্টার। আবার চেষ্টা কর!"
},
{
text: "একটি প্লট যা দেখায় কিভাবে একটি অডিও সংকেতের amplitude সময়ের সাথে পরিবর্তিত হয়। এটি শব্দের *time domain* উপস্থাপনা হিসাবেও পরিচিত।",
explain: "উপরের বর্ণনাটি waveform বোঝায়, spectrogram নয়।"
},
{
text: "একটি সিগন্যালের ফ্রিকোয়েন্সি বর্ণালীর একটি ভিজ্যুয়াল উপস্থাপনা কারণ এটি সময়ের সাথে পরিবর্তিত হয়।",
explain: "",
correct: true
}
]}
/>
### ৪. কাঁচা অডিও ডেটাকে Whisper দ্বারা প্রত্যাশিত লগ-মেল স্পেকট্রোগ্রামে রূপান্তর করার সবচেয়ে সহজ উপায় কী?
A.
```python
librosa.feature.melspectrogram(audio["array"])
```
B.
```python
feature_extractor = WhisperFeatureExtractor.from_pretrained("openai/whisper-small")
feature_extractor(audio["array"])
```
C.
```python
dataset.feature(audio["array"], model="whisper")
```
<Question
choices={[
{
text: "A",
explain: "`librosa.feature.melspectrogram()` পাওয়ার স্পেক্ট্রোগ্রাম তৈরী করে।"
},
{
text: "B",
explain: "",
correct: true
},
{
text: "C",
explain: "ডেটাসেট ট্রান্সফরমার মডেলের জন্য ফিচারস প্রস্তুত করে না, এটি মডেলের প্রিপ্রসেসর দ্বারা করা হয়।"
}
]}
/>
### ৫. আপনি কিভাবে 🤗 হাব থেকে একটি ডেটাসেট লোড করবেন?
A.
```python
from datasets import load_dataset
dataset = load_dataset(DATASET_NAME_ON_HUB)
```
B.
```python
import librosa
dataset = librosa.load(PATH_TO_DATASET)
```
C.
```python
from transformers import load_dataset
dataset = load_dataset(DATASET_NAME_ON_HUB)
```
<Question
choices={[
{
text: "A",
explain: "সবচেয়ে ভালো উপায় হল 🤗 ডেটাসেট লাইব্রেরি ব্যবহার করা।",
correct: true
},
{
text: "B",
explain: "Librosa.load স্যাম্পলিং রেট সহ একটি পাথ থেকে একটি পৃথক অডিও ফাইল লোড করতে উপযোগী, কিন্তু অনেক উদাহরণ এবং একাধিক বৈশিষ্ট্য সহ একটি সম্পূর্ণ ডেটাসেট নয়।"
},
{
text: "C",
explain: "load_dataset পদ্ধতি 🤗 ডেটাসেট লাইব্রেরিতে আসে, 🤗 ট্রান্সফরমারে নয়।"
}
]}
/>
### ৬. আপনার কাস্টম ডেটাসেটে ৩২ kHz sampling rate সহ উচ্চ-মানের অডিও রয়েছে৷ আপনি একটি speech recognition মডেলকে train করতে চান যা আশা করে যে অডিও উদাহরণগুলির একটি ১৬ kHz sampling rate থাকবে। আপনার কী করা উচিত?
<Question
choices={[
{
text: "উদাহরণগুলি যেমন ব্যবহার করুন, মডেলটি সহজেই উচ্চ মানের অডিও উদাহরণে সাধারণীকরণ করবে।",
explain: "attention mechanism এর উপর নির্ভরতার কারণে, মডেলগুলির জন্য নমুনা হারের মধ্যে সাধারণীকরণ করা চ্যালেঞ্জিং।"
},
{
text: "কাস্টম ডেটাসেটের উদাহরণগুলি ডাউন নমুনা করতে 🤗 ডেটাসেট লাইব্রেরি থেকে Audio মডিউল ব্যবহার করুন। ",
explain: "",
correct: true
},
{
text: "প্রতি দ্বিতীয় নমুনা ফেলে দিয়ে একটি গুণনীয়ক ২x দ্বারা ডাউন নমুনা।",
explain: "এটি সিগন্যালে বিকৃতি তৈরি করবে। সঠিকভাবে পুনরায় নমুনা তৈরি করা কঠিন এবং ভাল-পরীক্ষিত লাইব্রেরি যেমন librosa বা 🤗 datasets গুলোর উপরে ছেড়ে দেওয়া ভালো।"
}
]}
/>
### ৭. আপনি কিভাবে একটি মেশিন লার্নিং মডেল দ্বারা উত্পন্ন একটি spectrogram কে একটি waveform এ রূপান্তর করতে পারেন?
<Question
choices={[
{
text: "আমরা স্পেকট্রোগ্রাম থেকে একটি waveform পুনর্গঠন করতে vocoder নামে একটি নিউরাল নেটওয়ার্ক ব্যবহার করতে পারি।",
explain: "যেহেতু এই ক্ষেত্রে ফেজের তথ্য অনুপস্থিত, তাই তরঙ্গরূপ পুনর্গঠনের জন্য আমাদের একটি vocodder বা ক্লাসিক Griffin-Lim অ্যালগরিদম ব্যবহার করতে হবে।",
correct: true
},
{
text: "উৎপন্ন স্পেকট্রোগ্রামকে তরঙ্গরূপে রূপান্তর করতে আমরা inverse STFT ব্যবহার করতে পারি। ",
explain: "একটি জেনারেট করা spectrogram এ ফেজ তথ্য অনুপস্থিত রয়েছে যা inverse STFT ব্যবহার করার জন্য প্রয়োজন।"
},
{
text: "আপনি একটি মেশিন লার্নিং মডেল দ্বারা উত্পন্ন একটি spectrogram কে waveform এ রূপান্তর করতে পারবেন না।",
explain: "আবার চেষ্টা করুন!"
}
]}
/>
| 0 |
0 | hf_public_repos/audio-transformers-course/chapters/bn | hf_public_repos/audio-transformers-course/chapters/bn/chapter1/introduction.mdx | # Chapter 1. Working with audio data
## What will you learn in this chapter?
Every audio or speech task begins with an audio file. Before we can dive into solving these tasks, it is important
to understand what these files actually contain and how to work with them.
In this chapter, you will gain an understanding of fundamental terminology related to audio data, including waveforms, sampling rate, and spectrograms.
You will also learn how to work with audio datasets, including loading and preprocessing audio data, and how to stream large datasets efficiently.
By the end of this chapter, you will have a strong grasp of the essential audio data terminology and will be equipped with the skills
needed to work with audio datasets for different applications. The knowledge you gain in this chapter will lay the foundation for understanding the remainder of the course. | 1 |
0 | hf_public_repos/audio-transformers-course/chapters/bn | hf_public_repos/audio-transformers-course/chapters/bn/chapter1/audio_data.mdx | # Introduction to audio data
By nature, a sound wave is a continuous signal, meaning it contains an infinite number of signal values in a given time. This poses problems for digital
devices, which have limited storage. The continuous signal therefore has to be converted into a series of discrete values
so that it can be processed, stored, and transmitted by digital devices.
If you look at any audio dataset, you'll find digital files with sound excerpts, such as text narration or music.
You may encounter different file formats such as `.wav` (Waveform Audio File), `.flac` (Free Lossless Audio Codec),
and `.mp3` (MPEG-1 Audio Layer 3). The key difference between these formats lies in how they compress the digital representation of the audio signal.
Let's take a look at how we get from a continuous signal to a series of discrete values. The analog signal is first captured by a
microphone, which converts the sound waves into an electrical signal. The electrical signal is then digitized by an
Analog-to-Digital Converter through the process of sampling. This is how we obtain a series of discrete values
from an analog signal.
## Sampling and the sampling rate
Sampling is the process of measuring the value of a continuous signal at fixed time steps. The sampled waveform is _discrete_,
since the _discrete_ waveform contains a finite number of signal values at uniform intervals.
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/Signal_Sampling.png" alt="Signal sampling illustration">
</div>
*Illustration from the Wikipedia article: [Sampling (signal processing)](https://en.wikipedia.org/wiki/Sampling_(signal_processing))*
The **sampling rate** (also called the sampling frequency) is the number of samples taken in one second when discretizing a continuous
signal, and it is measured in hertz (Hz). For example, CD-quality audio has a sampling rate of 44,100 Hz, meaning samples are taken 44,100
times per second. For comparison, high-resolution audio has a sampling rate of 192,000 Hz, or 192 kHz. A common sampling rate
used for training speech models is 16,000 Hz, or 16 kHz.
The value of the sampling rate primarily determines the highest frequency that can be captured from the signal. This is known as the Nyquist limit and is
exactly half the sampling rate. The audible frequencies in human speech are below 8 kHz, so sampling speech at 16 kHz is sufficient. Using a higher sampling rate
will not capture more information and will merely increase the computational cost of processing such files. On the other hand, if the sampling rate is too low,
information will be lost: human speech captured at 8 kHz loses its higher frequencies and sounds muffled.
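To make the Nyquist relationship concrete, here is a tiny sketch (plain Python, nothing specific to any library) that prints the highest capturable frequency for a few common sampling rates:

```py
for sampling_rate in [8_000, 16_000, 44_100, 192_000]:
    # the Nyquist limit is exactly half the sampling rate
    nyquist_limit = sampling_rate / 2
    print(f"{sampling_rate} Hz -> Nyquist limit {nyquist_limit:.0f} Hz")
```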
When working on any audio task, it's important to make sure that all the audio in your dataset has the same sampling rate. If you plan to use custom audio data
to fine-tune a pre-trained model, the sampling rate of your data must match the sampling rate the model was pre-trained on. The sampling rate
determines the time interval between successive audio samples, which affects the temporal resolution of the audio data.
For example: a 5-second audio clip at a 16,000 Hz sampling rate contains 80,000 distinct values, while the same 5-second clip
at an 8,000 Hz sampling rate would contain 40,000 distinct values.
Transformer models that solve audio tasks treat the audio simply as sequences of numbers, so the exact number of values matters a lot.
Since different audio files can have different sampling rates, all the files have to be brought to the same sampling rate.
This process is called **resampling**. We will learn more about it in the [preprocessing](preprocessing#resampling-the-audio-data) section.
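As a minimal sketch of that sample-count arithmetic and of resampling (the audio array here is synthetic silence; `librosa.resample` with the `orig_sr`/`target_sr` keyword arguments is the standard librosa call):

```py
import numpy as np
import librosa

duration_s = 5.0
orig_sr = 16_000  # sampling rate of the clip, in Hz

# a 5-second clip at 16,000 Hz holds duration * rate = 80,000 values
audio = np.zeros(int(duration_s * orig_sr), dtype=np.float32)
print(len(audio))  # 80000

# resampling to 8,000 Hz leaves half as many values for the same 5 seconds
resampled = librosa.resample(audio, orig_sr=orig_sr, target_sr=8_000)
print(len(resampled))  # 40000
```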
## Amplitude and bit depth
The sampling rate tells you how often the samples are taken per second, but what exactly are the values in each sample?
Sound is made by changes in air pressure at frequencies that are audible to humans. The **amplitude** of a sound describes the sound pressure level
at any given instant and is measured in decibels (dB). To give you an example, a normal speaking voice is under 60 dB,
and a rock concert can be at around 125 dB.
In digital audio, each audio sample records the amplitude of the audio wave at a point in time. The **bit depth** of the
sample determines with how much precision this amplitude value can be described. The higher the bit depth, the more
faithfully the digital representation approximates the original continuous sound wave.
The most common audio bit depths are 16-bit and 24-bit. 16-bit audio can represent 65,536 possible amplitude values, while 24-bit audio can represent
16,777,216. Since quantizing rounds a continuous signal's values to a finite set of steps, the process adds
noise to the signal. The higher the bit depth, the smaller this quantization noise. In practice, the quantization noise of 16-bit audio is already
small enough to be inaudible, and using higher bit depths is generally not necessary.
You may also come across 32-bit audio. This stores the samples as floating-point values, whereas 16-bit and 24-bit audio use integer samples.
The precision of a 32-bit floating-point value is 24 bits, giving it the same bit depth as 24-bit audio. Floating-point audio samples lie within the [-1.0, 1.0]
range, and since machine learning models naturally work on floating-point data, the audio must first be converted into floating-point
format if it isn't already. We will see how to do this in the [preprocessing](preprocessing) section.
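As a small illustration of that conversion (the samples here are made up; dividing by 32768 is the usual convention for 16-bit PCM):

```py
import numpy as np

# a few synthetic 16-bit integer samples in the range [-32768, 32767]
int16_samples = np.array([0, 8192, -16384, 32767], dtype=np.int16)

# convert to floating point in the [-1.0, 1.0] range
float_samples = int16_samples.astype(np.float32) / 32768.0
print(float_samples)  # [ 0.        0.25     -0.5       0.99996948]
```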
Just as with continuous audio signals, the amplitude of digital audio is typically expressed in decibels (dB). Human hearing is logarithmic in nature: our ears
are more sensitive to small fluctuations in quiet sounds than in loud ones. The loudness of a sound is easier to interpret if the amplitude is in decibels, which are also logarithmic.
The decibel scale for real-world audio starts at 0 dB, which represents the quietest sound humans can hear, and louder sounds have larger dB values.
However, for digital audio signals, 0 dB is the loudest possible amplitude. As a quick rule of thumb: every -6 dB is a halving of the amplitude,
and anything below -60 dB is generally inaudible unless you really crank up the volume.
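You can verify the -6 dB rule of thumb numerically; amplitude in dB relative to full scale is 20 * log10(amplitude):

```py
import numpy as np

for amplitude in [1.0, 0.5, 0.25, 0.001]:
    print(f"amplitude {amplitude}: {20 * np.log10(amplitude):7.2f} dB")
# 1.0 -> 0.00 dB, 0.5 -> -6.02 dB, 0.25 -> -12.04 dB, 0.001 -> -60.00 dB
```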
## Audio as a waveform
You may have seen sounds visualized as a **waveform**, which plots the sample values over time and illustrates the changes in the sound's amplitude.
This is also known as the *time domain* representation of sound.
This type of visualization is useful for identifying specific features of the audio signal, such as the timing of individual sound events, the overall
loudness of the signal, and any irregularities or noise present in the audio.
To plot the waveform of an audio signal, we can use a Python library called `librosa`:
```bash
pip install librosa
```
Let's take an example sound called "trumpet" that comes with the library:
```py
import librosa
array, sampling_rate = librosa.load(librosa.ex("trumpet"))
```
The example audio is loaded as a tuple of an audio time series (here we call it `array`) and the sampling rate (`sampling_rate`).
Let's take a look at this sound's waveform using librosa's `waveshow()` function:
```py
import matplotlib.pyplot as plt
import librosa.display
plt.figure().set_figwidth(12)
librosa.display.waveshow(array, sr=sampling_rate)
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/waveform_plot.png" alt="Waveform plot">
</div>
This plots the amplitude of the signal on the y-axis and time along the x-axis. In other words, each point corresponds to a single sample value that was taken
when this sound was sampled. Also note that librosa returns the audio as floating-point values, and that the amplitude values are indeed within the [-1.0, 1.0] range.
Visualizing the audio, along with listening to it, can be a useful tool for understanding the data you are working with.
You can see the shape of the signal, observe patterns, and spot noise or distortion. If you preprocess the data in some way, such as normalization, resampling,
or filtering, you can visually confirm that the preprocessing steps have been applied as expected. After training a model,
you can also use these plots to debug issues when they arise.
## The frequency spectrum
Another way to visualize audio data is to plot the **frequency spectrum** of an audio signal, also known as the *frequency domain* representation.
The spectrum is computed using the discrete Fourier transform, or DFT. It describes the individual frequencies that make up the signal and how strong they are.
Let's plot the frequency spectrum for the same trumpet sound by taking the DFT using numpy's `rfft()` function. While it is
possible to plot the spectrum of the whole sound, it's more useful to look at a small region instead. Here we'll take the DFT over the first 4096 samples, which is roughly the length of the first note being played:
```py
import numpy as np
dft_input = array[:4096]
# calculate the DFT
window = np.hanning(len(dft_input))
windowed_input = dft_input * window
dft = np.fft.rfft(windowed_input)
# get the amplitude spectrum in decibels
amplitude = np.abs(dft)
amplitude_db = librosa.amplitude_to_db(amplitude, ref=np.max)
# get the frequency bins
frequency = librosa.fft_frequencies(sr=sampling_rate, n_fft=len(dft_input))
plt.figure().set_figwidth(12)
plt.plot(frequency, amplitude_db)
plt.xlabel("Frequency (Hz)")
plt.ylabel("Amplitude (dB)")
plt.xscale("log")
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/spectrum_plot.png" alt="Spectrum plot">
</div>
This plots the strength of the various frequency components that are present in this audio segment. The frequency values are on the x-axis, and their amplitudes are on the y-axis. The x-axis is usually plotted on a logarithmic scale.
The frequency spectrum we plotted shows several peaks. These peaks correspond to the harmonics of the note that's being played, with the higher harmonics being quieter. Since the first peak is at around 620 Hz, this is the frequency spectrum of an E♭ note.
The output of the DFT is an array of complex numbers, made up of real and imaginary components.
Taking the magnitude with `np.abs(dft)` extracts the amplitude information. The angle between the real and imaginary components is known as the phase spectrum, but this is often discarded in machine learning applications.
You can use `librosa.amplitude_to_db()` to convert the amplitude values to the decibel scale, making it easier to see the finer details in the spectrum. Sometimes people use the **power spectrum**, which measures energy rather than amplitude;
this is simply the amplitude values squared.
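As a short follow-up to the snippet above (assuming `dft` is the `rfft()` output computed there), the power spectrum is just the squared amplitudes, and librosa offers a matching decibel conversion:

```py
# assuming `dft` comes from the np.fft.rfft() call in the previous snippet
power = np.abs(dft) ** 2  # power spectrum: the amplitude values squared
power_db = librosa.power_to_db(power, ref=np.max)
```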
<Tip>
💡 In practice, people use the term FFT interchangeably with DFT, as the FFT, or Fast Fourier Transform, is the only efficient way to calculate the DFT on a computer.
</Tip>
The frequency spectrum of an audio signal contains the same information as its waveform; they are simply two different ways of looking at the same data. Where the waveform plots the amplitude
of the audio signal over time, the spectrum visualizes the amplitudes of the individual frequencies at a fixed point in time.
## Spectrogram
What if we want to see how the frequencies in an audio signal change? The trumpet plays several notes, each with different frequencies. The problem is that
the spectrum only shows a frozen snapshot of the frequencies at a given instant. The solution is to take multiple DFTs, each covering only a small slice of
the total time, and then stack the resulting spectra together. This is called a **spectrogram**.
A spectrogram plots the frequency content of an audio signal as it changes over time. It lets you see time, frequency, and
amplitude all on one plot. The algorithm that performs this computation is the STFT, or Short Time Fourier Transform.
Let's plot a spectrogram for the same trumpet sound using librosa's `stft()` and `specshow()` functions:
```py
import numpy as np
D = librosa.stft(array)
S_db = librosa.amplitude_to_db(np.abs(D), ref=np.max)
plt.figure().set_figwidth(12)
librosa.display.specshow(S_db, x_axis="time", y_axis="hz")
plt.colorbar()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/spectrogram_plot.png" alt="Spectrogram plot">
</div>
In this plot, the x-axis represents time as before, but now the y-axis represents frequency in Hz.
The intensity of the color gives the amplitude or power of the frequency component at each point in time, measured in decibels (dB).
The spectrogram is created by taking short segments of the audio signal, typically lasting a few milliseconds, and computing the DFT of each segment to obtain its frequency spectrum.
The resulting spectra are then stacked together on the time axis to create the spectrogram. Each vertical slice in this image corresponds to a single frequency spectrum.
By default, `librosa.stft()` splits the audio signal into segments of 2048 samples, which gives a good trade-off between frequency resolution and time resolution.
Since the spectrogram and the waveform are different views of the same data, it's possible to turn the spectrogram back into the waveform using the inverse STFT.
However, this requires the phase information in addition to the amplitudes. If the spectrogram was generated by a machine learning model, it typically only outputs
the amplitudes. In that case, we can use a phase reconstruction algorithm such as the classic Griffin-Lim algorithm, or a neural
network called a vocoder, to reconstruct a waveform from the spectrogram.
Spectrograms aren't just used for visualization. Many machine learning models take spectrograms as input and produce spectrograms as output.
Now that we know what a spectrogram is and how it's made, let's take a look at a variant of it widely used for speech processing: the mel spectrogram.
## Mel spectrogram
A mel spectrogram is a variation of the spectrogram that is commonly used in speech processing tasks.
It is similar to a spectrogram in that it shows the frequency content of an audio signal over time, but on a different frequency axis.
In a standard spectrogram, the frequency axis is linear and is measured in hertz (Hz). However, the human auditory system is
more sensitive to changes in lower frequencies than in higher frequencies, and this sensitivity decreases logarithmically as the frequency increases.
The mel scale is a perceptual scale that approximates the non-linear frequency response of the human ear.
To create a mel spectrogram, the STFT is used just like before, splitting the audio into short segments to obtain a sequence of frequency spectra.
Additionally, each spectrum is sent through a set of filters, the so-called mel filterbank, which transforms the frequencies to the mel scale.
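If you are curious what such a filterbank looks like, librosa exposes it directly as `librosa.filters.mel` (the parameter values below simply mirror the plot that follows):

```py
import librosa

# one row of filter weights per mel band, one column per STFT frequency bin
mel_filterbank = librosa.filters.mel(sr=22050, n_fft=2048, n_mels=128, fmax=8000)
print(mel_filterbank.shape)  # (128, 1025)
```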
Let's see how we can plot a mel spectrogram using librosa's `melspectrogram()` function, which performs all of these steps for us:
```py
S = librosa.feature.melspectrogram(y=array, sr=sampling_rate, n_mels=128, fmax=8000)
S_dB = librosa.power_to_db(S, ref=np.max)
plt.figure().set_figwidth(12)
librosa.display.specshow(S_dB, x_axis="time", y_axis="mel", sr=sampling_rate, fmax=8000)
plt.colorbar()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/mel-spectrogram.png" alt="Mel spectrogram plot">
</div>
In the example above, `n_mels` stands for the number of mel bands. The mel bands define a set of frequency ranges that divide the spectrum into
perceptually meaningful components, using filters whose shape and spacing are chosen to mimic the way the human ear responds to different frequencies.
Common values for `n_mels` are 40 or 80. `fmax` indicates the highest frequency (in Hz) we care about.
Just as with a regular spectrogram, it is common practice to express the strength of the mel frequency components
in decibels. This is commonly referred to as a **log-mel spectrogram**, because the conversion to decibels involves a logarithmic operation. The example above uses `librosa.power_to_db()`,
as `librosa.feature.melspectrogram()` creates a power spectrogram.
<Tip>
💡 Not all mel spectrograms are the same! There are two commonly used mel scales ("htk" and "slaney"),
and instead of the power spectrogram, the amplitude spectrogram may be used. The conversion to a log-mel spectrogram doesn't always compute
true decibels but may simply take the `log`. Therefore, if a machine learning model expects a mel spectrogram as input,
double check to make sure you're computing it the same way.
</Tip>
Creating a mel spectrogram is a lossy operation as it involves filtering the signal. Converting a mel spectrogram back into a waveform
is harder than doing this for a regular spectrogram, as it requires estimating the frequencies that were thrown
away. This is why machine learning models such as the HiFiGAN vocoder are needed to produce a waveform from a mel spectrogram.
Compared to a standard spectrogram, a mel spectrogram can capture more perceptually meaningful features of the audio signal, making it
a popular choice in tasks such as speech recognition, speaker identification, and music genre classification.
Now that you know how to visualize examples of audio data, go ahead and try to see what your favorite sounds look like. :) | 2 |
0 | hf_public_repos/audio-transformers-course/chapters/bn | hf_public_repos/audio-transformers-course/chapters/bn/chapter1/load_and_explore.mdx | # Load and explore an audio dataset
In this course we will use the 🤗 Datasets library to work with audio datasets. 🤗 Datasets is an open-source library for downloading and preparing
datasets from all modalities, including audio. The library offers an easy, unmatched way to access machine learning datasets that are publicly available on the Hugging Face Hub.
Moreover, 🤗 Datasets makes it simple for both researchers and practitioners to work with such datasets.
To get started working with audio datasets, make sure you have the 🤗 Datasets library installed:
```bash
pip install datasets[audio]
```
One of the key features of 🤗 Datasets is the ability to download and prepare a dataset in just one line of code using the `load_dataset()` function.
Let's load and explore an audio dataset called [MINDS-14](https://huggingface.co/datasets/PolyAI/minds14), which contains recordings of people asking
an e-banking system questions in several languages and dialects.
To load the MINDS-14 dataset, we need to copy the dataset's identifier on the Hub (`PolyAI/minds14`) and pass it to the `load_dataset` function. We will
also specify that we are only interested in the Australian subset (`en-AU`), and limit it to the training split:
```py
from datasets import load_dataset
minds = load_dataset("PolyAI/minds14", name="en-AU", split="train")
minds
```
**Output:**
```out
Dataset(
    {
        features: [
            "path",
            "audio",
            "transcription",
            "english_transcription",
            "intent_class",
            "lang_id",
        ],
        num_rows: 654,
    }
)
```
The dataset contains 654 audio files, each of which is accompanied by a transcription, an English translation, and a label
indicating the intent behind the person's query. The audio column contains the raw audio data. Let's take a closer look
at one of the examples:
```py
example = minds[0]
example
```
**Output:**
```out
{
    "path": "/root/.cache/huggingface/datasets/downloads/extracted/f14948e0e84be638dd7943ac36518a4cf3324e8b7aa331c5ab11541518e9368c/en-AU~PAY_BILL/response_4.wav",
    "audio": {
        "path": "/root/.cache/huggingface/datasets/downloads/extracted/f14948e0e84be638dd7943ac36518a4cf3324e8b7aa331c5ab11541518e9368c/en-AU~PAY_BILL/response_4.wav",
        "array": array(
            [0.0, 0.00024414, -0.00024414, ..., -0.00024414, 0.00024414, 0.0012207],
            dtype=float32,
        ),
        "sampling_rate": 8000,
    },
    "transcription": "I would like to pay my electricity bill using my card can you please assist",
    "english_transcription": "I would like to pay my electricity bill using my card can you please assist",
    "intent_class": 13,
    "lang_id": 2,
}
```
You may notice that the audio column contains several features. Here's what they are (a quick duration check follows the list below):
* `path`: the path to the audio file (`*.wav` in this case).
* `array`: the decoded audio data, represented as a 1-dimensional NumPy array.
* `sampling_rate`: the sampling rate of the audio file (8,000 Hz in this example).
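As a quick sanity check using just the fields above (pure arithmetic, no extra libraries), the duration of the clip follows from the array length and the sampling rate:

```py
duration_s = len(example["audio"]["array"]) / example["audio"]["sampling_rate"]
print(f"This example is {duration_s:.2f} seconds long")
```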
The `intent_class` is a classification category of the audio recording. To convert this number into a meaningful string,
we can use the `int2str()` method:
```py
id2label = minds.features["intent_class"].int2str
id2label(example["intent_class"])
```
**Output:**
```out
"pay_bill"
```
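If you want to see every available label at once, the same feature object exposes the full list through the standard 🤗 Datasets `ClassLabel` API:

```py
minds.features["intent_class"].names
```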
If you look at the transcription feature, you can see that the audio file has indeed recorded a person asking a question
about paying a bill.
If you plan to train an audio classifier on this subset of the data, you may not necessarily need all of the features. For example,
the `lang_id` will have the same value for all examples and won't be useful. The `english_transcription` will likely
duplicate the `transcription` in this subset, so we can safely remove them.
You can easily remove the irrelevant features using 🤗 Datasets' `remove_columns` method:
```py
columns_to_remove = ["lang_id", "english_transcription"]
minds = minds.remove_columns(columns_to_remove)
minds
```
**Output:**
```out
Dataset({features: ["path", "audio", "transcription", "intent_class"], num_rows: 654})
```
Now that we've loaded and inspected the raw contents of the dataset, let's listen to a few examples! We'll use `Blocks` and `Audio` from `Gradio`
to decode a few random samples from the dataset:
```py
import gradio as gr


def generate_audio():
    example = minds.shuffle()[0]
    audio = example["audio"]
    return (
        audio["sampling_rate"],
        audio["array"],
    ), id2label(example["intent_class"])


with gr.Blocks() as demo:
    with gr.Column():
        for _ in range(4):
            audio, label = generate_audio()
            output = gr.Audio(audio, label=label)

demo.launch(debug=True)
```
If you'd like, you can also visualize some of the examples. Let's plot the waveform for the first example.
```py
import librosa
import matplotlib.pyplot as plt
import librosa.display
array = example["audio"]["array"]
sampling_rate = example["audio"]["sampling_rate"]
plt.figure().set_figwidth(12)
librosa.display.waveshow(array, sr=sampling_rate)
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/waveform_unit1.png" alt="Waveform plot">
</div>
Now try it yourself! Download another dialect or language of the MINDS-14 dataset, listen to and visualize some examples to get a sense
of the variation across the dataset. You can find the full list of available languages [here](https://huggingface.co/datasets/PolyAI/minds14).
| 3 |
0 | hf_public_repos/audio-transformers-course | hf_public_repos/audio-transformers-course/utils/carbon-config.json | {"paddingVertical":"1px","paddingHorizontal":"1px","backgroundImage":null,"backgroundImageSelection":null,"backgroundMode":"color","backgroundColor":"rgba(255,255,255,1)","dropShadow":false,"dropShadowOffsetY":"20px","dropShadowBlurRadius":"68px","theme":"one-light","windowTheme":"none","language":"python","fontFamily":"Fira Code","fontSize":"14px","lineHeight":"152%","windowControls":false,"widthAdjustment":true,"lineNumbers":false,"firstLineNumber":1,"exportSize":"2x","watermark":false,"squaredImage":false,"hiddenCharacters":false,"name":"","width":680,"highlights":{"keyword":"rgba(139,92,246,1)","variable":"rgba(236,72,153,1)","number":"rgba(180,83,9,1)","string":"rgba(80,161,79,1)"}} | 4 |
0 | hf_public_repos/audio-transformers-course | hf_public_repos/audio-transformers-course/utils/generate_notebooks.py | import argparse
import os
import re
import nbformat
import shutil
import yaml
from pathlib import Path
re_framework_test = re.compile(r"^{#if\s+fw\s+===\s+'([^']+)'}\s*$")
re_framework_else = re.compile(r"^{:else}\s*$")
re_framework_end = re.compile(r"^{/if}\s*$")
re_html_line = re.compile(r"^<[^>]*/>\s*$")
re_html_tag = re.compile(r"<([^/>]*)>\s*$")
re_python_code = re.compile(r"^```(?:py|python|py no\-format|python no\-format)\s*$")
re_output_code = re.compile(r"^```(?:py|python)\s+out\s*$")
re_end_code = re.compile(r"^```\s*$")
frameworks = {"pt": "PyTorch", "tf": "TensorFlow"}
PATH_TO_COURSE = Path("chapters/")
def read_and_split_frameworks(fname):
"""
Read the MDX in fname and creates two versions (if necessary) for each framework.
"""
with open(fname, "r") as f:
content = f.readlines()
contents = {"pt": [], "tf": []}
differences = False
current_content = []
line_idx = 0
for line in content:
if re_framework_test.search(line) is not None:
differences = True
framework = re_framework_test.search(line).groups()[0]
for key in contents:
contents[key].extend(current_content)
current_content = []
elif re_framework_else.search(line) is not None:
contents[framework].extend(current_content)
current_content = []
framework = "pt" if framework == "tf" else "tf"
elif re_framework_end.search(line) is not None:
contents[framework].extend(current_content)
current_content = []
else:
current_content.append(line)
if len(current_content) > 0:
for key in contents:
contents[key].extend(current_content)
if differences:
return {k: "".join(content) for k, content in contents.items()}
else:
return "".join(content)
def extract_cells(content):
"""
Extract the code/output cells from content.
"""
cells = []
current_cell = None
is_output = False
for line in content.split("\n"):
if re_python_code.search(line) is not None:
is_output = False
current_cell = []
elif re_output_code.search(line) is not None:
is_output = True
current_cell = []
elif re_end_code.search(line) is not None and current_cell is not None:
cell = "\n".join(current_cell)
if is_output:
if not isinstance(cells[-1], tuple):
cells[-1] = (cells[-1], cell)
else:
cells.append(cell)
current_cell = None
current_md = []
elif current_cell is not None:
current_cell.append(line)
return cells
def convert_to_nb_cell(cell):
"""
Convert some cell (either just code or tuple (code, output)) to a proper notebook cell.
"""
nb_cell = {"cell_type": "code", "execution_count": None, "metadata": {}}
if isinstance(cell, tuple):
nb_cell["source"] = cell[0]
nb_cell["outputs"] = [
nbformat.notebooknode.NotebookNode(
{
"data": {"text/plain": cell[1]},
"execution_count": None,
"metadata": {},
"output_type": "execute_result",
}
)
]
else:
nb_cell["source"] = cell
nb_cell["outputs"] = []
return nbformat.notebooknode.NotebookNode(nb_cell)
def nb_cell(source, code=True):
    if not code:
        return nbformat.notebooknode.NotebookNode({"cell_type": "markdown", "source": source, "metadata": {}})
    return nbformat.notebooknode.NotebookNode(
        {"cell_type": "code", "metadata": {}, "source": source, "execution_count": None, "outputs": []}
    )
def build_notebook(fname, title, output_dir="."):
"""
Build the notebook for fname with a given title in output_dir.
"""
sections = read_and_split_frameworks(fname)
sections_with_accelerate = [
"chapter3/4", # "A full training",
"chapter7/2_pt", # "Token classification (PyTorch)",
"chapter7/3_pt", # "Fine-tuning a masked language model (PyTorch)"
"chapter7/4_pt", # "Translation (PyTorch)"
"chapter7/5_pt", # "Summarization (PyTorch)",
"chapter7/6_pt", # "Training a causal language model from scratch (PyTorch)"
"chapter7/7_pt", # "Question answering (PyTorch)"
]
sections_with_hf_hub = [
"chapter4/3_pt", # "Sharing pretrained models (PyTorch)"
"chapter4/3_tf", # "Sharing pretrained models (TensorFlow)"
"chapter5/5", # "Creating your own dataset"
"chapter7/2_pt", # "Token classification (PyTorch)"
"chapter7/2_tf", # "Token classification (TensorFlow)"
"chapter6/2", # "Training a new tokenizer from an old one"
"chapter7/3_pt", # "Fine-tuning a masked language model (PyTorch)"
"chapter7/3_tf", # "Fine-tuning a masked language model (TensorFlow)"
"chapter7/4_pt", # "Translation (PyTorch)"
"chapter7/4_tf", # "Translation (TensorFlow)"
"chapter7/5_pt", # "Summarization (PyTorch)"
"chapter7/5_tf", # "Summarization (TensorFlow)"
"chapter7/6_pt", # "Training a causal language model from scratch (PyTorch)"
"chapter7/6_tf", # "Training a causal language model from scratch (TensorFlow)"
"chapter7/7_pt", # "Question answering (PyTorch)"
"chapter7/7_tf", # "Question answering (TensorFlow)"
"chapter8/2", # "What to do when you get an error"
]
sections_with_faiss = [
"chapter5/6_pt", # "Semantic search with FAISS (PyTorch)"
"chapter5/6_tf", # "Semantic search with FAISS (TensorFlow)"
]
sections_with_gradio = [
"chapter9/2", # "Building your first demo"
"chapter9/3", # "Understanding the Interface class"
"chapter9/4", # "Sharing demos with others"
"chapter9/5", # "Integrations with the Hugging Face Hub"
"chapter9/6", # "Advanced Interface features"
"chapter9/7", # "Introduction to Blocks"
]
stem = Path(fname).stem
if not isinstance(sections, dict):
contents = [sections]
titles = [title]
fnames = [f"section{stem}.ipynb"]
section_names = [f"{Path(fname).parent.stem}/{stem}"]
else:
contents = []
titles = []
fnames = []
section_names = []
for key, section in sections.items():
contents.append(section)
titles.append(f"{title} ({frameworks[key]})")
fnames.append(f"section{stem}_{key}.ipynb")
section_names.append(f"{Path(fname).parent.stem}/{stem}_{key}")
for title, content, fname, section_name in zip(titles, contents, fnames, section_names):
cells = extract_cells(content)
if len(cells) == 0:
continue
nb_cells = [
nb_cell(f"# {title}", code=False),
nb_cell("Install the Transformers, Datasets, and Evaluate libraries to run this notebook.", code=False),
]
# Install cell
installs = ["!pip install datasets evaluate transformers[sentencepiece]"]
if section_name in sections_with_accelerate:
installs.append("!pip install accelerate")
installs.append("# To run the training on TPU, you will need to uncomment the followin line:")
installs.append(
"# !pip install cloud-tpu-client==0.10 torch==1.9.0 https://storage.googleapis.com/tpu-pytorch/wheels/torch_xla-1.9-cp37-cp37m-linux_x86_64.whl"
)
if section_name in sections_with_hf_hub:
installs.append("!apt install git-lfs")
if section_name in sections_with_faiss:
installs.append("!pip install faiss-gpu")
if section_name in sections_with_gradio:
installs.append("!pip install gradio")
nb_cells.append(nb_cell("\n".join(installs)))
if section_name in sections_with_hf_hub:
nb_cells.extend(
[
nb_cell(
"You will need to setup git, adapt your email and name in the following cell.", code=False
),
nb_cell(
'!git config --global user.email "[email protected]"\n!git config --global user.name "Your Name"'
),
nb_cell(
"You will also need to be logged in to the Hugging Face Hub. Execute the following and enter your credentials.",
code=False,
),
nb_cell("from huggingface_hub import notebook_login\n\nnotebook_login()"),
]
)
nb_cells += [convert_to_nb_cell(cell) for cell in cells]
metadata = {"colab": {"name": title, "provenance": []}}
nb_dict = {"cells": nb_cells, "metadata": metadata, "nbformat": 4, "nbformat_minor": 4}
notebook = nbformat.notebooknode.NotebookNode(nb_dict)
os.makedirs(output_dir, exist_ok=True)
nbformat.write(notebook, os.path.join(output_dir, fname), version=4)
def get_titles(language):
"""
Parse the _toctree.yml file to get the correspondence filename to title
"""
table = yaml.safe_load(open(os.path.join(f"chapters/{language}", "_toctree.yml"), "r"))
result = {}
for entry in table:
for section in entry["sections"]:
section_title = section["title"]
if "local_fw" in section:
section_names = section["local_fw"]
result[section_names["pt"]] = section_title
result[section_names["tf"]] = section_title
else:
section_name = section["local"]
result[section_name] = section_title
return {k: v for k, v in result.items() if "quiz" not in v}
def create_notebooks(language, output_dir):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    for folder in os.listdir(output_dir):
        if folder.startswith("chapter"):
            shutil.rmtree(os.path.join(output_dir, folder))
    titles = get_titles(language)
    for fname, title in titles.items():
        build_notebook(
            os.path.join(f"chapters/{language}", f"{fname}.mdx"),
            title,
            os.path.join(output_dir, Path(fname).parent),
        )
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--output_dir", type=str, help="Where to output the notebooks")
    args = parser.parse_args()

    languages = [f.stem for f in PATH_TO_COURSE.iterdir() if f.is_dir()]

    for language in languages:
        language_output_dir = f"{args.output_dir}/{language}"
        create_notebooks(language, language_output_dir)
        # Remove empty notebook folders
        if not any(Path(language_output_dir).iterdir()):
            shutil.rmtree(language_output_dir)
| 5 |
0 | hf_public_repos/audio-transformers-course | hf_public_repos/audio-transformers-course/utils/validate_translation.py | import argparse
import os
import yaml
from pathlib import Path
PATH_TO_COURSE = Path("chapters/")
def load_sections(language: str):
    toc = yaml.safe_load(
        open(os.path.join(PATH_TO_COURSE / language, "_toctree.yml"), "r")
    )
    sections = []
    for chapter in toc:
        for section in chapter["sections"]:
            sections.append(section["local"])
    return set(sorted(sections))
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--language", type=str, help="Translation language to validate")
    args = parser.parse_args()

    english_sections = load_sections("en")
    translation_sections = load_sections(args.language)

    missing_sections = english_sections.difference(translation_sections)

    if len(missing_sections) > 0:
        print("Missing sections:")
        for section in missing_sections:
            print(section)
    else:
        print("✅ No missing sections - translation complete!") | 6 |
0 | hf_public_repos/audio-transformers-course | hf_public_repos/audio-transformers-course/utils/code_formatter.py | import argparse
import black
import os
import re
from pathlib import Path
def blackify(filename, check_only=False):
    # Read the content of the file
    with open(filename, "r", encoding="utf-8") as f:
        content = f.read()
    lines = content.split("\n")

    # Split the content into code samples in py or python blocks.
    code_samples = []
    line_index = 0
    while line_index < len(lines):
        line = lines[line_index]
        if line.strip() in ["```py", "```python"]:
            line_index += 1
            start_index = line_index
            while line_index < len(lines) and lines[line_index].strip() != "```":
                line_index += 1
            code = "\n".join(lines[start_index:line_index])
            # Deal with ! instructions
            code = re.sub(r"^!", r"## !", code, flags=re.MULTILINE)
            code_samples.append({"start_index": start_index, "end_index": line_index - 1, "code": code})
            line_index += 1
        else:
            line_index += 1

    # Let's blackify the code! We put everything in one big text to go faster.
    delimiter = "\n\n### New cell ###\n"
    full_code = delimiter.join([sample["code"] for sample in code_samples])
    formatted_code = full_code.replace("\t", "    ")
    formatted_code = black.format_str(formatted_code, mode=black.FileMode({black.TargetVersion.PY37}, line_length=90))

    # Black adds last new lines we don't want, so we strip individual code samples.
    cells = formatted_code.split(delimiter)
    cells = [cell.strip() for cell in cells]
    formatted_code = delimiter.join(cells)

    if check_only:
        return full_code == formatted_code
    elif full_code == formatted_code:
        # Nothing to do, all is good
        return

    formatted_code = re.sub(r"^## !", r"!", formatted_code, flags=re.MULTILINE)
    print(f"Formatting {filename}")
    # Re-build the content with formatted code
    new_lines = []
    start_index = 0
    for sample, code in zip(code_samples, formatted_code.split(delimiter)):
        new_lines.extend(lines[start_index : sample["start_index"]])
        new_lines.append(code)
        start_index = sample["end_index"] + 1
    new_lines.extend(lines[start_index:])
    with open(filename, "w", encoding="utf-8") as f:
        f.write("\n".join(new_lines))
def format_all_files(check_only=False):
    failures = []
    for filename in Path("chapters").glob("**/*.mdx"):
        try:
            same = blackify(filename, check_only=check_only)
            if check_only and not same:
                failures.append(filename)
        except Exception:
            print(f"Failed to format {filename}.")
            raise

    if check_only and len(failures) > 0:
        raise ValueError(f"{len(failures)} files need to be formatted, run `make style`.")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--check_only",
        action="store_true",
        help="Just check files are properly formatted.",
    )
    args = parser.parse_args()

    format_all_files(check_only=args.check_only)
| 7 |
0 | hf_public_repos | hf_public_repos/100-times-faster-nlp/100-times-faster-nlp-in-python.html | <!DOCTYPE html>
<html>
<head><meta charset="utf-8" />
<title>100-times-faster-nlp-in-python</title><script src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/2.0.3/jquery.min.js"></script>
<style type="text/css">
/*!
*
* Twitter Bootstrap
*
*/
/*!
* Bootstrap v3.3.7 (http://getbootstrap.com)
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
*/
/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */
html {
font-family: sans-serif;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
}
body {
margin: 0;
}
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
main,
menu,
nav,
section,
summary {
display: block;
}
audio,
canvas,
progress,
video {
display: inline-block;
vertical-align: baseline;
}
audio:not([controls]) {
display: none;
height: 0;
}
[hidden],
template {
display: none;
}
a {
background-color: transparent;
}
a:active,
a:hover {
outline: 0;
}
abbr[title] {
border-bottom: 1px dotted;
}
b,
strong {
font-weight: bold;
}
dfn {
font-style: italic;
}
h1 {
font-size: 2em;
margin: 0.67em 0;
}
mark {
background: #ff0;
color: #000;
}
small {
font-size: 80%;
}
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
img {
border: 0;
}
svg:not(:root) {
overflow: hidden;
}
figure {
margin: 1em 40px;
}
hr {
box-sizing: content-box;
height: 0;
}
pre {
overflow: auto;
}
code,
kbd,
pre,
samp {
font-family: monospace, monospace;
font-size: 1em;
}
button,
input,
optgroup,
select,
textarea {
color: inherit;
font: inherit;
margin: 0;
}
button {
overflow: visible;
}
button,
select {
text-transform: none;
}
button,
html input[type="button"],
input[type="reset"],
input[type="submit"] {
-webkit-appearance: button;
cursor: pointer;
}
button[disabled],
html input[disabled] {
cursor: default;
}
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
input {
line-height: normal;
}
input[type="checkbox"],
input[type="radio"] {
box-sizing: border-box;
padding: 0;
}
input[type="number"]::-webkit-inner-spin-button,
input[type="number"]::-webkit-outer-spin-button {
height: auto;
}
input[type="search"] {
-webkit-appearance: textfield;
box-sizing: content-box;
}
input[type="search"]::-webkit-search-cancel-button,
input[type="search"]::-webkit-search-decoration {
-webkit-appearance: none;
}
fieldset {
border: 1px solid #c0c0c0;
margin: 0 2px;
padding: 0.35em 0.625em 0.75em;
}
legend {
border: 0;
padding: 0;
}
textarea {
overflow: auto;
}
optgroup {
font-weight: bold;
}
table {
border-collapse: collapse;
border-spacing: 0;
}
td,
th {
padding: 0;
}
/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */
@media print {
*,
*:before,
*:after {
background: transparent !important;
color: #000 !important;
box-shadow: none !important;
text-shadow: none !important;
}
a,
a:visited {
text-decoration: underline;
}
a[href]:after {
content: " (" attr(href) ")";
}
abbr[title]:after {
content: " (" attr(title) ")";
}
a[href^="#"]:after,
a[href^="javascript:"]:after {
content: "";
}
pre,
blockquote {
border: 1px solid #999;
page-break-inside: avoid;
}
thead {
display: table-header-group;
}
tr,
img {
page-break-inside: avoid;
}
img {
max-width: 100% !important;
}
p,
h2,
h3 {
orphans: 3;
widows: 3;
}
h2,
h3 {
page-break-after: avoid;
}
.navbar {
display: none;
}
.btn > .caret,
.dropup > .btn > .caret {
border-top-color: #000 !important;
}
.label {
border: 1px solid #000;
}
.table {
border-collapse: collapse !important;
}
.table td,
.table th {
background-color: #fff !important;
}
.table-bordered th,
.table-bordered td {
border: 1px solid #ddd !important;
}
}
@font-face {
font-family: 'Glyphicons Halflings';
src: url('../components/bootstrap/fonts/glyphicons-halflings-regular.eot');
src: url('../components/bootstrap/fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'), url('../components/bootstrap/fonts/glyphicons-halflings-regular.woff2') format('woff2'), url('../components/bootstrap/fonts/glyphicons-halflings-regular.woff') format('woff'), url('../components/bootstrap/fonts/glyphicons-halflings-regular.ttf') format('truetype'), url('../components/bootstrap/fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular') format('svg');
}
.glyphicon {
position: relative;
top: 1px;
display: inline-block;
font-family: 'Glyphicons Halflings';
font-style: normal;
font-weight: normal;
line-height: 1;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.glyphicon-asterisk:before {
content: "\002a";
}
.glyphicon-plus:before {
content: "\002b";
}
.glyphicon-euro:before,
.glyphicon-eur:before {
content: "\20ac";
}
.glyphicon-minus:before {
content: "\2212";
}
.glyphicon-cloud:before {
content: "\2601";
}
.glyphicon-envelope:before {
content: "\2709";
}
.glyphicon-pencil:before {
content: "\270f";
}
.glyphicon-glass:before {
content: "\e001";
}
.glyphicon-music:before {
content: "\e002";
}
.glyphicon-search:before {
content: "\e003";
}
.glyphicon-heart:before {
content: "\e005";
}
.glyphicon-star:before {
content: "\e006";
}
.glyphicon-star-empty:before {
content: "\e007";
}
.glyphicon-user:before {
content: "\e008";
}
.glyphicon-film:before {
content: "\e009";
}
.glyphicon-th-large:before {
content: "\e010";
}
.glyphicon-th:before {
content: "\e011";
}
.glyphicon-th-list:before {
content: "\e012";
}
.glyphicon-ok:before {
content: "\e013";
}
.glyphicon-remove:before {
content: "\e014";
}
.glyphicon-zoom-in:before {
content: "\e015";
}
.glyphicon-zoom-out:before {
content: "\e016";
}
.glyphicon-off:before {
content: "\e017";
}
.glyphicon-signal:before {
content: "\e018";
}
.glyphicon-cog:before {
content: "\e019";
}
.glyphicon-trash:before {
content: "\e020";
}
.glyphicon-home:before {
content: "\e021";
}
.glyphicon-file:before {
content: "\e022";
}
.glyphicon-time:before {
content: "\e023";
}
.glyphicon-road:before {
content: "\e024";
}
.glyphicon-download-alt:before {
content: "\e025";
}
.glyphicon-download:before {
content: "\e026";
}
.glyphicon-upload:before {
content: "\e027";
}
.glyphicon-inbox:before {
content: "\e028";
}
.glyphicon-play-circle:before {
content: "\e029";
}
.glyphicon-repeat:before {
content: "\e030";
}
.glyphicon-refresh:before {
content: "\e031";
}
.glyphicon-list-alt:before {
content: "\e032";
}
.glyphicon-lock:before {
content: "\e033";
}
.glyphicon-flag:before {
content: "\e034";
}
.glyphicon-headphones:before {
content: "\e035";
}
.glyphicon-volume-off:before {
content: "\e036";
}
.glyphicon-volume-down:before {
content: "\e037";
}
.glyphicon-volume-up:before {
content: "\e038";
}
.glyphicon-qrcode:before {
content: "\e039";
}
.glyphicon-barcode:before {
content: "\e040";
}
.glyphicon-tag:before {
content: "\e041";
}
.glyphicon-tags:before {
content: "\e042";
}
.glyphicon-book:before {
content: "\e043";
}
.glyphicon-bookmark:before {
content: "\e044";
}
.glyphicon-print:before {
content: "\e045";
}
.glyphicon-camera:before {
content: "\e046";
}
.glyphicon-font:before {
content: "\e047";
}
.glyphicon-bold:before {
content: "\e048";
}
.glyphicon-italic:before {
content: "\e049";
}
.glyphicon-text-height:before {
content: "\e050";
}
.glyphicon-text-width:before {
content: "\e051";
}
.glyphicon-align-left:before {
content: "\e052";
}
.glyphicon-align-center:before {
content: "\e053";
}
.glyphicon-align-right:before {
content: "\e054";
}
.glyphicon-align-justify:before {
content: "\e055";
}
.glyphicon-list:before {
content: "\e056";
}
.glyphicon-indent-left:before {
content: "\e057";
}
.glyphicon-indent-right:before {
content: "\e058";
}
.glyphicon-facetime-video:before {
content: "\e059";
}
.glyphicon-picture:before {
content: "\e060";
}
.glyphicon-map-marker:before {
content: "\e062";
}
.glyphicon-adjust:before {
content: "\e063";
}
.glyphicon-tint:before {
content: "\e064";
}
.glyphicon-edit:before {
content: "\e065";
}
.glyphicon-share:before {
content: "\e066";
}
.glyphicon-check:before {
content: "\e067";
}
.glyphicon-move:before {
content: "\e068";
}
.glyphicon-step-backward:before {
content: "\e069";
}
.glyphicon-fast-backward:before {
content: "\e070";
}
.glyphicon-backward:before {
content: "\e071";
}
.glyphicon-play:before {
content: "\e072";
}
.glyphicon-pause:before {
content: "\e073";
}
.glyphicon-stop:before {
content: "\e074";
}
.glyphicon-forward:before {
content: "\e075";
}
.glyphicon-fast-forward:before {
content: "\e076";
}
.glyphicon-step-forward:before {
content: "\e077";
}
.glyphicon-eject:before {
content: "\e078";
}
.glyphicon-chevron-left:before {
content: "\e079";
}
.glyphicon-chevron-right:before {
content: "\e080";
}
.glyphicon-plus-sign:before {
content: "\e081";
}
.glyphicon-minus-sign:before {
content: "\e082";
}
.glyphicon-remove-sign:before {
content: "\e083";
}
.glyphicon-ok-sign:before {
content: "\e084";
}
.glyphicon-question-sign:before {
content: "\e085";
}
.glyphicon-info-sign:before {
content: "\e086";
}
.glyphicon-screenshot:before {
content: "\e087";
}
.glyphicon-remove-circle:before {
content: "\e088";
}
.glyphicon-ok-circle:before {
content: "\e089";
}
.glyphicon-ban-circle:before {
content: "\e090";
}
.glyphicon-arrow-left:before {
content: "\e091";
}
.glyphicon-arrow-right:before {
content: "\e092";
}
.glyphicon-arrow-up:before {
content: "\e093";
}
.glyphicon-arrow-down:before {
content: "\e094";
}
.glyphicon-share-alt:before {
content: "\e095";
}
.glyphicon-resize-full:before {
content: "\e096";
}
.glyphicon-resize-small:before {
content: "\e097";
}
.glyphicon-exclamation-sign:before {
content: "\e101";
}
.glyphicon-gift:before {
content: "\e102";
}
.glyphicon-leaf:before {
content: "\e103";
}
.glyphicon-fire:before {
content: "\e104";
}
.glyphicon-eye-open:before {
content: "\e105";
}
.glyphicon-eye-close:before {
content: "\e106";
}
.glyphicon-warning-sign:before {
content: "\e107";
}
.glyphicon-plane:before {
content: "\e108";
}
.glyphicon-calendar:before {
content: "\e109";
}
.glyphicon-random:before {
content: "\e110";
}
.glyphicon-comment:before {
content: "\e111";
}
.glyphicon-magnet:before {
content: "\e112";
}
.glyphicon-chevron-up:before {
content: "\e113";
}
.glyphicon-chevron-down:before {
content: "\e114";
}
.glyphicon-retweet:before {
content: "\e115";
}
.glyphicon-shopping-cart:before {
content: "\e116";
}
.glyphicon-folder-close:before {
content: "\e117";
}
.glyphicon-folder-open:before {
content: "\e118";
}
.glyphicon-resize-vertical:before {
content: "\e119";
}
.glyphicon-resize-horizontal:before {
content: "\e120";
}
.glyphicon-hdd:before {
content: "\e121";
}
.glyphicon-bullhorn:before {
content: "\e122";
}
.glyphicon-bell:before {
content: "\e123";
}
.glyphicon-certificate:before {
content: "\e124";
}
.glyphicon-thumbs-up:before {
content: "\e125";
}
.glyphicon-thumbs-down:before {
content: "\e126";
}
.glyphicon-hand-right:before {
content: "\e127";
}
.glyphicon-hand-left:before {
content: "\e128";
}
.glyphicon-hand-up:before {
content: "\e129";
}
.glyphicon-hand-down:before {
content: "\e130";
}
.glyphicon-circle-arrow-right:before {
content: "\e131";
}
.glyphicon-circle-arrow-left:before {
content: "\e132";
}
.glyphicon-circle-arrow-up:before {
content: "\e133";
}
.glyphicon-circle-arrow-down:before {
content: "\e134";
}
.glyphicon-globe:before {
content: "\e135";
}
.glyphicon-wrench:before {
content: "\e136";
}
.glyphicon-tasks:before {
content: "\e137";
}
.glyphicon-filter:before {
content: "\e138";
}
.glyphicon-briefcase:before {
content: "\e139";
}
.glyphicon-fullscreen:before {
content: "\e140";
}
.glyphicon-dashboard:before {
content: "\e141";
}
.glyphicon-paperclip:before {
content: "\e142";
}
.glyphicon-heart-empty:before {
content: "\e143";
}
.glyphicon-link:before {
content: "\e144";
}
.glyphicon-phone:before {
content: "\e145";
}
.glyphicon-pushpin:before {
content: "\e146";
}
.glyphicon-usd:before {
content: "\e148";
}
.glyphicon-gbp:before {
content: "\e149";
}
.glyphicon-sort:before {
content: "\e150";
}
.glyphicon-sort-by-alphabet:before {
content: "\e151";
}
.glyphicon-sort-by-alphabet-alt:before {
content: "\e152";
}
.glyphicon-sort-by-order:before {
content: "\e153";
}
.glyphicon-sort-by-order-alt:before {
content: "\e154";
}
.glyphicon-sort-by-attributes:before {
content: "\e155";
}
.glyphicon-sort-by-attributes-alt:before {
content: "\e156";
}
.glyphicon-unchecked:before {
content: "\e157";
}
.glyphicon-expand:before {
content: "\e158";
}
.glyphicon-collapse-down:before {
content: "\e159";
}
.glyphicon-collapse-up:before {
content: "\e160";
}
.glyphicon-log-in:before {
content: "\e161";
}
.glyphicon-flash:before {
content: "\e162";
}
.glyphicon-log-out:before {
content: "\e163";
}
.glyphicon-new-window:before {
content: "\e164";
}
.glyphicon-record:before {
content: "\e165";
}
.glyphicon-save:before {
content: "\e166";
}
.glyphicon-open:before {
content: "\e167";
}
.glyphicon-saved:before {
content: "\e168";
}
.glyphicon-import:before {
content: "\e169";
}
.glyphicon-export:before {
content: "\e170";
}
.glyphicon-send:before {
content: "\e171";
}
.glyphicon-floppy-disk:before {
content: "\e172";
}
.glyphicon-floppy-saved:before {
content: "\e173";
}
.glyphicon-floppy-remove:before {
content: "\e174";
}
.glyphicon-floppy-save:before {
content: "\e175";
}
.glyphicon-floppy-open:before {
content: "\e176";
}
.glyphicon-credit-card:before {
content: "\e177";
}
.glyphicon-transfer:before {
content: "\e178";
}
.glyphicon-cutlery:before {
content: "\e179";
}
.glyphicon-header:before {
content: "\e180";
}
.glyphicon-compressed:before {
content: "\e181";
}
.glyphicon-earphone:before {
content: "\e182";
}
.glyphicon-phone-alt:before {
content: "\e183";
}
.glyphicon-tower:before {
content: "\e184";
}
.glyphicon-stats:before {
content: "\e185";
}
.glyphicon-sd-video:before {
content: "\e186";
}
.glyphicon-hd-video:before {
content: "\e187";
}
.glyphicon-subtitles:before {
content: "\e188";
}
.glyphicon-sound-stereo:before {
content: "\e189";
}
.glyphicon-sound-dolby:before {
content: "\e190";
}
.glyphicon-sound-5-1:before {
content: "\e191";
}
.glyphicon-sound-6-1:before {
content: "\e192";
}
.glyphicon-sound-7-1:before {
content: "\e193";
}
.glyphicon-copyright-mark:before {
content: "\e194";
}
.glyphicon-registration-mark:before {
content: "\e195";
}
.glyphicon-cloud-download:before {
content: "\e197";
}
.glyphicon-cloud-upload:before {
content: "\e198";
}
.glyphicon-tree-conifer:before {
content: "\e199";
}
.glyphicon-tree-deciduous:before {
content: "\e200";
}
.glyphicon-cd:before {
content: "\e201";
}
.glyphicon-save-file:before {
content: "\e202";
}
.glyphicon-open-file:before {
content: "\e203";
}
.glyphicon-level-up:before {
content: "\e204";
}
.glyphicon-copy:before {
content: "\e205";
}
.glyphicon-paste:before {
content: "\e206";
}
.glyphicon-alert:before {
content: "\e209";
}
.glyphicon-equalizer:before {
content: "\e210";
}
.glyphicon-king:before {
content: "\e211";
}
.glyphicon-queen:before {
content: "\e212";
}
.glyphicon-pawn:before {
content: "\e213";
}
.glyphicon-bishop:before {
content: "\e214";
}
.glyphicon-knight:before {
content: "\e215";
}
.glyphicon-baby-formula:before {
content: "\e216";
}
.glyphicon-tent:before {
content: "\26fa";
}
.glyphicon-blackboard:before {
content: "\e218";
}
.glyphicon-bed:before {
content: "\e219";
}
.glyphicon-apple:before {
content: "\f8ff";
}
.glyphicon-erase:before {
content: "\e221";
}
.glyphicon-hourglass:before {
content: "\231b";
}
.glyphicon-lamp:before {
content: "\e223";
}
.glyphicon-duplicate:before {
content: "\e224";
}
.glyphicon-piggy-bank:before {
content: "\e225";
}
.glyphicon-scissors:before {
content: "\e226";
}
.glyphicon-bitcoin:before {
content: "\e227";
}
.glyphicon-btc:before {
content: "\e227";
}
.glyphicon-xbt:before {
content: "\e227";
}
.glyphicon-yen:before {
content: "\00a5";
}
.glyphicon-jpy:before {
content: "\00a5";
}
.glyphicon-ruble:before {
content: "\20bd";
}
.glyphicon-rub:before {
content: "\20bd";
}
.glyphicon-scale:before {
content: "\e230";
}
.glyphicon-ice-lolly:before {
content: "\e231";
}
.glyphicon-ice-lolly-tasted:before {
content: "\e232";
}
.glyphicon-education:before {
content: "\e233";
}
.glyphicon-option-horizontal:before {
content: "\e234";
}
.glyphicon-option-vertical:before {
content: "\e235";
}
.glyphicon-menu-hamburger:before {
content: "\e236";
}
.glyphicon-modal-window:before {
content: "\e237";
}
.glyphicon-oil:before {
content: "\e238";
}
.glyphicon-grain:before {
content: "\e239";
}
.glyphicon-sunglasses:before {
content: "\e240";
}
.glyphicon-text-size:before {
content: "\e241";
}
.glyphicon-text-color:before {
content: "\e242";
}
.glyphicon-text-background:before {
content: "\e243";
}
.glyphicon-object-align-top:before {
content: "\e244";
}
.glyphicon-object-align-bottom:before {
content: "\e245";
}
.glyphicon-object-align-horizontal:before {
content: "\e246";
}
.glyphicon-object-align-left:before {
content: "\e247";
}
.glyphicon-object-align-vertical:before {
content: "\e248";
}
.glyphicon-object-align-right:before {
content: "\e249";
}
.glyphicon-triangle-right:before {
content: "\e250";
}
.glyphicon-triangle-left:before {
content: "\e251";
}
.glyphicon-triangle-bottom:before {
content: "\e252";
}
.glyphicon-triangle-top:before {
content: "\e253";
}
.glyphicon-console:before {
content: "\e254";
}
.glyphicon-superscript:before {
content: "\e255";
}
.glyphicon-subscript:before {
content: "\e256";
}
.glyphicon-menu-left:before {
content: "\e257";
}
.glyphicon-menu-right:before {
content: "\e258";
}
.glyphicon-menu-down:before {
content: "\e259";
}
.glyphicon-menu-up:before {
content: "\e260";
}
* {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
*:before,
*:after {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
html {
font-size: 10px;
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
}
body {
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 13px;
line-height: 1.42857143;
color: #000;
background-color: #fff;
}
input,
button,
select,
textarea {
font-family: inherit;
font-size: inherit;
line-height: inherit;
}
a {
color: #337ab7;
text-decoration: none;
}
a:hover,
a:focus {
color: #23527c;
text-decoration: underline;
}
a:focus {
outline: 5px auto -webkit-focus-ring-color;
outline-offset: -2px;
}
figure {
margin: 0;
}
img {
vertical-align: middle;
}
.img-responsive,
.thumbnail > img,
.thumbnail a > img,
.carousel-inner > .item > img,
.carousel-inner > .item > a > img {
display: block;
max-width: 100%;
height: auto;
}
.img-rounded {
border-radius: 3px;
}
.img-thumbnail {
padding: 4px;
line-height: 1.42857143;
background-color: #fff;
border: 1px solid #ddd;
border-radius: 2px;
-webkit-transition: all 0.2s ease-in-out;
-o-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
display: inline-block;
max-width: 100%;
height: auto;
}
.img-circle {
border-radius: 50%;
}
hr {
margin-top: 18px;
margin-bottom: 18px;
border: 0;
border-top: 1px solid #eeeeee;
}
.sr-only {
position: absolute;
width: 1px;
height: 1px;
margin: -1px;
padding: 0;
overflow: hidden;
clip: rect(0, 0, 0, 0);
border: 0;
}
.sr-only-focusable:active,
.sr-only-focusable:focus {
position: static;
width: auto;
height: auto;
margin: 0;
overflow: visible;
clip: auto;
}
[role="button"] {
cursor: pointer;
}
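/* Typography: heading scale, body copy, alignment and transform
   utilities, and contextual text/background color helpers. */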
h1,
h2,
h3,
h4,
h5,
h6,
.h1,
.h2,
.h3,
.h4,
.h5,
.h6 {
font-family: inherit;
font-weight: 500;
line-height: 1.1;
color: inherit;
}
h1 small,
h2 small,
h3 small,
h4 small,
h5 small,
h6 small,
.h1 small,
.h2 small,
.h3 small,
.h4 small,
.h5 small,
.h6 small,
h1 .small,
h2 .small,
h3 .small,
h4 .small,
h5 .small,
h6 .small,
.h1 .small,
.h2 .small,
.h3 .small,
.h4 .small,
.h5 .small,
.h6 .small {
font-weight: normal;
line-height: 1;
color: #777777;
}
h1,
.h1,
h2,
.h2,
h3,
.h3 {
margin-top: 18px;
margin-bottom: 9px;
}
h1 small,
.h1 small,
h2 small,
.h2 small,
h3 small,
.h3 small,
h1 .small,
.h1 .small,
h2 .small,
.h2 .small,
h3 .small,
.h3 .small {
font-size: 65%;
}
h4,
.h4,
h5,
.h5,
h6,
.h6 {
margin-top: 9px;
margin-bottom: 9px;
}
h4 small,
.h4 small,
h5 small,
.h5 small,
h6 small,
.h6 small,
h4 .small,
.h4 .small,
h5 .small,
.h5 .small,
h6 .small,
.h6 .small {
font-size: 75%;
}
h1,
.h1 {
font-size: 33px;
}
h2,
.h2 {
font-size: 27px;
}
h3,
.h3 {
font-size: 23px;
}
h4,
.h4 {
font-size: 17px;
}
h5,
.h5 {
font-size: 13px;
}
h6,
.h6 {
font-size: 12px;
}
p {
margin: 0 0 9px;
}
.lead {
margin-bottom: 18px;
font-size: 14px;
font-weight: 300;
line-height: 1.4;
}
@media (min-width: 768px) {
.lead {
font-size: 19.5px;
}
}
small,
.small {
font-size: 92%;
}
mark,
.mark {
background-color: #fcf8e3;
padding: .2em;
}
.text-left {
text-align: left;
}
.text-right {
text-align: right;
}
.text-center {
text-align: center;
}
.text-justify {
text-align: justify;
}
.text-nowrap {
white-space: nowrap;
}
.text-lowercase {
text-transform: lowercase;
}
.text-uppercase {
text-transform: uppercase;
}
.text-capitalize {
text-transform: capitalize;
}
.text-muted {
color: #777777;
}
.text-primary {
color: #337ab7;
}
a.text-primary:hover,
a.text-primary:focus {
color: #286090;
}
.text-success {
color: #3c763d;
}
a.text-success:hover,
a.text-success:focus {
color: #2b542c;
}
.text-info {
color: #31708f;
}
a.text-info:hover,
a.text-info:focus {
color: #245269;
}
.text-warning {
color: #8a6d3b;
}
a.text-warning:hover,
a.text-warning:focus {
color: #66512c;
}
.text-danger {
color: #a94442;
}
a.text-danger:hover,
a.text-danger:focus {
color: #843534;
}
.bg-primary {
color: #fff;
background-color: #337ab7;
}
a.bg-primary:hover,
a.bg-primary:focus {
background-color: #286090;
}
.bg-success {
background-color: #dff0d8;
}
a.bg-success:hover,
a.bg-success:focus {
background-color: #c1e2b3;
}
.bg-info {
background-color: #d9edf7;
}
a.bg-info:hover,
a.bg-info:focus {
background-color: #afd9ee;
}
.bg-warning {
background-color: #fcf8e3;
}
a.bg-warning:hover,
a.bg-warning:focus {
background-color: #f7ecb5;
}
.bg-danger {
background-color: #f2dede;
}
a.bg-danger:hover,
a.bg-danger:focus {
background-color: #e4b9b9;
}
.page-header {
padding-bottom: 8px;
margin: 36px 0 18px;
border-bottom: 1px solid #eeeeee;
}
ul,
ol {
margin-top: 0;
margin-bottom: 9px;
}
ul ul,
ol ul,
ul ol,
ol ol {
margin-bottom: 0;
}
.list-unstyled {
padding-left: 0;
list-style: none;
}
.list-inline {
padding-left: 0;
list-style: none;
margin-left: -5px;
}
.list-inline > li {
display: inline-block;
padding-left: 5px;
padding-right: 5px;
}
dl {
margin-top: 0;
margin-bottom: 18px;
}
dt,
dd {
line-height: 1.42857143;
}
dt {
font-weight: bold;
}
dd {
margin-left: 0;
}
@media (min-width: 541px) {
.dl-horizontal dt {
float: left;
width: 160px;
clear: left;
text-align: right;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.dl-horizontal dd {
margin-left: 180px;
}
}
abbr[title],
abbr[data-original-title] {
cursor: help;
border-bottom: 1px dotted #777777;
}
.initialism {
font-size: 90%;
text-transform: uppercase;
}
blockquote {
padding: 9px 18px;
margin: 0 0 18px;
font-size: inherit;
border-left: 5px solid #eeeeee;
}
blockquote p:last-child,
blockquote ul:last-child,
blockquote ol:last-child {
margin-bottom: 0;
}
blockquote footer,
blockquote small,
blockquote .small {
display: block;
font-size: 80%;
line-height: 1.42857143;
color: #777777;
}
blockquote footer:before,
blockquote small:before,
blockquote .small:before {
content: '\2014 \00A0';
}
.blockquote-reverse,
blockquote.pull-right {
padding-right: 15px;
padding-left: 0;
border-right: 5px solid #eeeeee;
border-left: 0;
text-align: right;
}
.blockquote-reverse footer:before,
blockquote.pull-right footer:before,
.blockquote-reverse small:before,
blockquote.pull-right small:before,
.blockquote-reverse .small:before,
blockquote.pull-right .small:before {
content: '';
}
.blockquote-reverse footer:after,
blockquote.pull-right footer:after,
.blockquote-reverse small:after,
blockquote.pull-right small:after,
.blockquote-reverse .small:after,
blockquote.pull-right .small:after {
content: '\00A0 \2014';
}
address {
margin-bottom: 18px;
font-style: normal;
line-height: 1.42857143;
}
code,
kbd,
pre,
samp {
font-family: monospace;
}
code {
padding: 2px 4px;
font-size: 90%;
color: #c7254e;
background-color: #f9f2f4;
border-radius: 2px;
}
kbd {
padding: 2px 4px;
font-size: 90%;
color: #888;
background-color: transparent;
border-radius: 1px;
box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.25);
}
kbd kbd {
padding: 0;
font-size: 100%;
font-weight: bold;
box-shadow: none;
}
pre {
display: block;
padding: 8.5px;
margin: 0 0 9px;
font-size: 12px;
line-height: 1.42857143;
word-break: break-all;
word-wrap: break-word;
color: #333333;
background-color: #f5f5f5;
border: 1px solid #ccc;
border-radius: 2px;
}
pre code {
padding: 0;
font-size: inherit;
color: inherit;
white-space: pre-wrap;
background-color: transparent;
border-radius: 0;
}
.pre-scrollable {
max-height: 340px;
overflow-y: scroll;
}
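/* Grid system: fixed and fluid containers, rows, and the 12-column
   responsive grid (xs/sm/md/lg). Column widths are n/12 expressed as
   percentages (e.g. .col-xs-4 is 4/12 = 33.33333333%); push/pull shift
   columns left or right, and offset adds a matching left margin. */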
.container {
margin-right: auto;
margin-left: auto;
padding-left: 0px;
padding-right: 0px;
}
@media (min-width: 768px) {
.container {
width: 768px;
}
}
@media (min-width: 992px) {
.container {
width: 940px;
}
}
@media (min-width: 1200px) {
.container {
width: 1140px;
}
}
.container-fluid {
margin-right: auto;
margin-left: auto;
padding-left: 0px;
padding-right: 0px;
}
.row {
margin-left: 0px;
margin-right: 0px;
}
.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12 {
position: relative;
min-height: 1px;
padding-left: 0px;
padding-right: 0px;
}
.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, .col-xs-12 {
float: left;
}
.col-xs-12 {
width: 100%;
}
.col-xs-11 {
width: 91.66666667%;
}
.col-xs-10 {
width: 83.33333333%;
}
.col-xs-9 {
width: 75%;
}
.col-xs-8 {
width: 66.66666667%;
}
.col-xs-7 {
width: 58.33333333%;
}
.col-xs-6 {
width: 50%;
}
.col-xs-5 {
width: 41.66666667%;
}
.col-xs-4 {
width: 33.33333333%;
}
.col-xs-3 {
width: 25%;
}
.col-xs-2 {
width: 16.66666667%;
}
.col-xs-1 {
width: 8.33333333%;
}
.col-xs-pull-12 {
right: 100%;
}
.col-xs-pull-11 {
right: 91.66666667%;
}
.col-xs-pull-10 {
right: 83.33333333%;
}
.col-xs-pull-9 {
right: 75%;
}
.col-xs-pull-8 {
right: 66.66666667%;
}
.col-xs-pull-7 {
right: 58.33333333%;
}
.col-xs-pull-6 {
right: 50%;
}
.col-xs-pull-5 {
right: 41.66666667%;
}
.col-xs-pull-4 {
right: 33.33333333%;
}
.col-xs-pull-3 {
right: 25%;
}
.col-xs-pull-2 {
right: 16.66666667%;
}
.col-xs-pull-1 {
right: 8.33333333%;
}
.col-xs-pull-0 {
right: auto;
}
.col-xs-push-12 {
left: 100%;
}
.col-xs-push-11 {
left: 91.66666667%;
}
.col-xs-push-10 {
left: 83.33333333%;
}
.col-xs-push-9 {
left: 75%;
}
.col-xs-push-8 {
left: 66.66666667%;
}
.col-xs-push-7 {
left: 58.33333333%;
}
.col-xs-push-6 {
left: 50%;
}
.col-xs-push-5 {
left: 41.66666667%;
}
.col-xs-push-4 {
left: 33.33333333%;
}
.col-xs-push-3 {
left: 25%;
}
.col-xs-push-2 {
left: 16.66666667%;
}
.col-xs-push-1 {
left: 8.33333333%;
}
.col-xs-push-0 {
left: auto;
}
.col-xs-offset-12 {
margin-left: 100%;
}
.col-xs-offset-11 {
margin-left: 91.66666667%;
}
.col-xs-offset-10 {
margin-left: 83.33333333%;
}
.col-xs-offset-9 {
margin-left: 75%;
}
.col-xs-offset-8 {
margin-left: 66.66666667%;
}
.col-xs-offset-7 {
margin-left: 58.33333333%;
}
.col-xs-offset-6 {
margin-left: 50%;
}
.col-xs-offset-5 {
margin-left: 41.66666667%;
}
.col-xs-offset-4 {
margin-left: 33.33333333%;
}
.col-xs-offset-3 {
margin-left: 25%;
}
.col-xs-offset-2 {
margin-left: 16.66666667%;
}
.col-xs-offset-1 {
margin-left: 8.33333333%;
}
.col-xs-offset-0 {
margin-left: 0%;
}
@media (min-width: 768px) {
.col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12 {
float: left;
}
.col-sm-12 {
width: 100%;
}
.col-sm-11 {
width: 91.66666667%;
}
.col-sm-10 {
width: 83.33333333%;
}
.col-sm-9 {
width: 75%;
}
.col-sm-8 {
width: 66.66666667%;
}
.col-sm-7 {
width: 58.33333333%;
}
.col-sm-6 {
width: 50%;
}
.col-sm-5 {
width: 41.66666667%;
}
.col-sm-4 {
width: 33.33333333%;
}
.col-sm-3 {
width: 25%;
}
.col-sm-2 {
width: 16.66666667%;
}
.col-sm-1 {
width: 8.33333333%;
}
.col-sm-pull-12 {
right: 100%;
}
.col-sm-pull-11 {
right: 91.66666667%;
}
.col-sm-pull-10 {
right: 83.33333333%;
}
.col-sm-pull-9 {
right: 75%;
}
.col-sm-pull-8 {
right: 66.66666667%;
}
.col-sm-pull-7 {
right: 58.33333333%;
}
.col-sm-pull-6 {
right: 50%;
}
.col-sm-pull-5 {
right: 41.66666667%;
}
.col-sm-pull-4 {
right: 33.33333333%;
}
.col-sm-pull-3 {
right: 25%;
}
.col-sm-pull-2 {
right: 16.66666667%;
}
.col-sm-pull-1 {
right: 8.33333333%;
}
.col-sm-pull-0 {
right: auto;
}
.col-sm-push-12 {
left: 100%;
}
.col-sm-push-11 {
left: 91.66666667%;
}
.col-sm-push-10 {
left: 83.33333333%;
}
.col-sm-push-9 {
left: 75%;
}
.col-sm-push-8 {
left: 66.66666667%;
}
.col-sm-push-7 {
left: 58.33333333%;
}
.col-sm-push-6 {
left: 50%;
}
.col-sm-push-5 {
left: 41.66666667%;
}
.col-sm-push-4 {
left: 33.33333333%;
}
.col-sm-push-3 {
left: 25%;
}
.col-sm-push-2 {
left: 16.66666667%;
}
.col-sm-push-1 {
left: 8.33333333%;
}
.col-sm-push-0 {
left: auto;
}
.col-sm-offset-12 {
margin-left: 100%;
}
.col-sm-offset-11 {
margin-left: 91.66666667%;
}
.col-sm-offset-10 {
margin-left: 83.33333333%;
}
.col-sm-offset-9 {
margin-left: 75%;
}
.col-sm-offset-8 {
margin-left: 66.66666667%;
}
.col-sm-offset-7 {
margin-left: 58.33333333%;
}
.col-sm-offset-6 {
margin-left: 50%;
}
.col-sm-offset-5 {
margin-left: 41.66666667%;
}
.col-sm-offset-4 {
margin-left: 33.33333333%;
}
.col-sm-offset-3 {
margin-left: 25%;
}
.col-sm-offset-2 {
margin-left: 16.66666667%;
}
.col-sm-offset-1 {
margin-left: 8.33333333%;
}
.col-sm-offset-0 {
margin-left: 0%;
}
}
@media (min-width: 992px) {
.col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12 {
float: left;
}
.col-md-12 {
width: 100%;
}
.col-md-11 {
width: 91.66666667%;
}
.col-md-10 {
width: 83.33333333%;
}
.col-md-9 {
width: 75%;
}
.col-md-8 {
width: 66.66666667%;
}
.col-md-7 {
width: 58.33333333%;
}
.col-md-6 {
width: 50%;
}
.col-md-5 {
width: 41.66666667%;
}
.col-md-4 {
width: 33.33333333%;
}
.col-md-3 {
width: 25%;
}
.col-md-2 {
width: 16.66666667%;
}
.col-md-1 {
width: 8.33333333%;
}
.col-md-pull-12 {
right: 100%;
}
.col-md-pull-11 {
right: 91.66666667%;
}
.col-md-pull-10 {
right: 83.33333333%;
}
.col-md-pull-9 {
right: 75%;
}
.col-md-pull-8 {
right: 66.66666667%;
}
.col-md-pull-7 {
right: 58.33333333%;
}
.col-md-pull-6 {
right: 50%;
}
.col-md-pull-5 {
right: 41.66666667%;
}
.col-md-pull-4 {
right: 33.33333333%;
}
.col-md-pull-3 {
right: 25%;
}
.col-md-pull-2 {
right: 16.66666667%;
}
.col-md-pull-1 {
right: 8.33333333%;
}
.col-md-pull-0 {
right: auto;
}
.col-md-push-12 {
left: 100%;
}
.col-md-push-11 {
left: 91.66666667%;
}
.col-md-push-10 {
left: 83.33333333%;
}
.col-md-push-9 {
left: 75%;
}
.col-md-push-8 {
left: 66.66666667%;
}
.col-md-push-7 {
left: 58.33333333%;
}
.col-md-push-6 {
left: 50%;
}
.col-md-push-5 {
left: 41.66666667%;
}
.col-md-push-4 {
left: 33.33333333%;
}
.col-md-push-3 {
left: 25%;
}
.col-md-push-2 {
left: 16.66666667%;
}
.col-md-push-1 {
left: 8.33333333%;
}
.col-md-push-0 {
left: auto;
}
.col-md-offset-12 {
margin-left: 100%;
}
.col-md-offset-11 {
margin-left: 91.66666667%;
}
.col-md-offset-10 {
margin-left: 83.33333333%;
}
.col-md-offset-9 {
margin-left: 75%;
}
.col-md-offset-8 {
margin-left: 66.66666667%;
}
.col-md-offset-7 {
margin-left: 58.33333333%;
}
.col-md-offset-6 {
margin-left: 50%;
}
.col-md-offset-5 {
margin-left: 41.66666667%;
}
.col-md-offset-4 {
margin-left: 33.33333333%;
}
.col-md-offset-3 {
margin-left: 25%;
}
.col-md-offset-2 {
margin-left: 16.66666667%;
}
.col-md-offset-1 {
margin-left: 8.33333333%;
}
.col-md-offset-0 {
margin-left: 0%;
}
}
@media (min-width: 1200px) {
.col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12 {
float: left;
}
.col-lg-12 {
width: 100%;
}
.col-lg-11 {
width: 91.66666667%;
}
.col-lg-10 {
width: 83.33333333%;
}
.col-lg-9 {
width: 75%;
}
.col-lg-8 {
width: 66.66666667%;
}
.col-lg-7 {
width: 58.33333333%;
}
.col-lg-6 {
width: 50%;
}
.col-lg-5 {
width: 41.66666667%;
}
.col-lg-4 {
width: 33.33333333%;
}
.col-lg-3 {
width: 25%;
}
.col-lg-2 {
width: 16.66666667%;
}
.col-lg-1 {
width: 8.33333333%;
}
.col-lg-pull-12 {
right: 100%;
}
.col-lg-pull-11 {
right: 91.66666667%;
}
.col-lg-pull-10 {
right: 83.33333333%;
}
.col-lg-pull-9 {
right: 75%;
}
.col-lg-pull-8 {
right: 66.66666667%;
}
.col-lg-pull-7 {
right: 58.33333333%;
}
.col-lg-pull-6 {
right: 50%;
}
.col-lg-pull-5 {
right: 41.66666667%;
}
.col-lg-pull-4 {
right: 33.33333333%;
}
.col-lg-pull-3 {
right: 25%;
}
.col-lg-pull-2 {
right: 16.66666667%;
}
.col-lg-pull-1 {
right: 8.33333333%;
}
.col-lg-pull-0 {
right: auto;
}
.col-lg-push-12 {
left: 100%;
}
.col-lg-push-11 {
left: 91.66666667%;
}
.col-lg-push-10 {
left: 83.33333333%;
}
.col-lg-push-9 {
left: 75%;
}
.col-lg-push-8 {
left: 66.66666667%;
}
.col-lg-push-7 {
left: 58.33333333%;
}
.col-lg-push-6 {
left: 50%;
}
.col-lg-push-5 {
left: 41.66666667%;
}
.col-lg-push-4 {
left: 33.33333333%;
}
.col-lg-push-3 {
left: 25%;
}
.col-lg-push-2 {
left: 16.66666667%;
}
.col-lg-push-1 {
left: 8.33333333%;
}
.col-lg-push-0 {
left: auto;
}
.col-lg-offset-12 {
margin-left: 100%;
}
.col-lg-offset-11 {
margin-left: 91.66666667%;
}
.col-lg-offset-10 {
margin-left: 83.33333333%;
}
.col-lg-offset-9 {
margin-left: 75%;
}
.col-lg-offset-8 {
margin-left: 66.66666667%;
}
.col-lg-offset-7 {
margin-left: 58.33333333%;
}
.col-lg-offset-6 {
margin-left: 50%;
}
.col-lg-offset-5 {
margin-left: 41.66666667%;
}
.col-lg-offset-4 {
margin-left: 33.33333333%;
}
.col-lg-offset-3 {
margin-left: 25%;
}
.col-lg-offset-2 {
margin-left: 16.66666667%;
}
.col-lg-offset-1 {
margin-left: 8.33333333%;
}
.col-lg-offset-0 {
margin-left: 0%;
}
}
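/* Tables: base table resets, condensed/bordered/striped/hover variants,
   contextual row and cell states, and the responsive scroll wrapper. */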
table {
background-color: transparent;
}
caption {
padding-top: 8px;
padding-bottom: 8px;
color: #777777;
text-align: left;
}
th {
text-align: left;
}
.table {
width: 100%;
max-width: 100%;
margin-bottom: 18px;
}
.table > thead > tr > th,
.table > tbody > tr > th,
.table > tfoot > tr > th,
.table > thead > tr > td,
.table > tbody > tr > td,
.table > tfoot > tr > td {
padding: 8px;
line-height: 1.42857143;
vertical-align: top;
border-top: 1px solid #ddd;
}
.table > thead > tr > th {
vertical-align: bottom;
border-bottom: 2px solid #ddd;
}
.table > caption + thead > tr:first-child > th,
.table > colgroup + thead > tr:first-child > th,
.table > thead:first-child > tr:first-child > th,
.table > caption + thead > tr:first-child > td,
.table > colgroup + thead > tr:first-child > td,
.table > thead:first-child > tr:first-child > td {
border-top: 0;
}
.table > tbody + tbody {
border-top: 2px solid #ddd;
}
.table .table {
background-color: #fff;
}
.table-condensed > thead > tr > th,
.table-condensed > tbody > tr > th,
.table-condensed > tfoot > tr > th,
.table-condensed > thead > tr > td,
.table-condensed > tbody > tr > td,
.table-condensed > tfoot > tr > td {
padding: 5px;
}
.table-bordered {
border: 1px solid #ddd;
}
.table-bordered > thead > tr > th,
.table-bordered > tbody > tr > th,
.table-bordered > tfoot > tr > th,
.table-bordered > thead > tr > td,
.table-bordered > tbody > tr > td,
.table-bordered > tfoot > tr > td {
border: 1px solid #ddd;
}
.table-bordered > thead > tr > th,
.table-bordered > thead > tr > td {
border-bottom-width: 2px;
}
.table-striped > tbody > tr:nth-of-type(odd) {
background-color: #f9f9f9;
}
.table-hover > tbody > tr:hover {
background-color: #f5f5f5;
}
table col[class*="col-"] {
position: static;
float: none;
display: table-column;
}
table td[class*="col-"],
table th[class*="col-"] {
position: static;
float: none;
display: table-cell;
}
.table > thead > tr > td.active,
.table > tbody > tr > td.active,
.table > tfoot > tr > td.active,
.table > thead > tr > th.active,
.table > tbody > tr > th.active,
.table > tfoot > tr > th.active,
.table > thead > tr.active > td,
.table > tbody > tr.active > td,
.table > tfoot > tr.active > td,
.table > thead > tr.active > th,
.table > tbody > tr.active > th,
.table > tfoot > tr.active > th {
background-color: #f5f5f5;
}
.table-hover > tbody > tr > td.active:hover,
.table-hover > tbody > tr > th.active:hover,
.table-hover > tbody > tr.active:hover > td,
.table-hover > tbody > tr:hover > .active,
.table-hover > tbody > tr.active:hover > th {
background-color: #e8e8e8;
}
.table > thead > tr > td.success,
.table > tbody > tr > td.success,
.table > tfoot > tr > td.success,
.table > thead > tr > th.success,
.table > tbody > tr > th.success,
.table > tfoot > tr > th.success,
.table > thead > tr.success > td,
.table > tbody > tr.success > td,
.table > tfoot > tr.success > td,
.table > thead > tr.success > th,
.table > tbody > tr.success > th,
.table > tfoot > tr.success > th {
background-color: #dff0d8;
}
.table-hover > tbody > tr > td.success:hover,
.table-hover > tbody > tr > th.success:hover,
.table-hover > tbody > tr.success:hover > td,
.table-hover > tbody > tr:hover > .success,
.table-hover > tbody > tr.success:hover > th {
background-color: #d0e9c6;
}
.table > thead > tr > td.info,
.table > tbody > tr > td.info,
.table > tfoot > tr > td.info,
.table > thead > tr > th.info,
.table > tbody > tr > th.info,
.table > tfoot > tr > th.info,
.table > thead > tr.info > td,
.table > tbody > tr.info > td,
.table > tfoot > tr.info > td,
.table > thead > tr.info > th,
.table > tbody > tr.info > th,
.table > tfoot > tr.info > th {
background-color: #d9edf7;
}
.table-hover > tbody > tr > td.info:hover,
.table-hover > tbody > tr > th.info:hover,
.table-hover > tbody > tr.info:hover > td,
.table-hover > tbody > tr:hover > .info,
.table-hover > tbody > tr.info:hover > th {
background-color: #c4e3f3;
}
.table > thead > tr > td.warning,
.table > tbody > tr > td.warning,
.table > tfoot > tr > td.warning,
.table > thead > tr > th.warning,
.table > tbody > tr > th.warning,
.table > tfoot > tr > th.warning,
.table > thead > tr.warning > td,
.table > tbody > tr.warning > td,
.table > tfoot > tr.warning > td,
.table > thead > tr.warning > th,
.table > tbody > tr.warning > th,
.table > tfoot > tr.warning > th {
background-color: #fcf8e3;
}
.table-hover > tbody > tr > td.warning:hover,
.table-hover > tbody > tr > th.warning:hover,
.table-hover > tbody > tr.warning:hover > td,
.table-hover > tbody > tr:hover > .warning,
.table-hover > tbody > tr.warning:hover > th {
background-color: #faf2cc;
}
.table > thead > tr > td.danger,
.table > tbody > tr > td.danger,
.table > tfoot > tr > td.danger,
.table > thead > tr > th.danger,
.table > tbody > tr > th.danger,
.table > tfoot > tr > th.danger,
.table > thead > tr.danger > td,
.table > tbody > tr.danger > td,
.table > tfoot > tr.danger > td,
.table > thead > tr.danger > th,
.table > tbody > tr.danger > th,
.table > tfoot > tr.danger > th {
background-color: #f2dede;
}
.table-hover > tbody > tr > td.danger:hover,
.table-hover > tbody > tr > th.danger:hover,
.table-hover > tbody > tr.danger:hover > td,
.table-hover > tbody > tr:hover > .danger,
.table-hover > tbody > tr.danger:hover > th {
background-color: #ebcccc;
}
.table-responsive {
overflow-x: auto;
min-height: 0.01%;
}
@media screen and (max-width: 767px) {
.table-responsive {
width: 100%;
margin-bottom: 13.5px;
overflow-y: hidden;
-ms-overflow-style: -ms-autohiding-scrollbar;
border: 1px solid #ddd;
}
.table-responsive > .table {
margin-bottom: 0;
}
.table-responsive > .table > thead > tr > th,
.table-responsive > .table > tbody > tr > th,
.table-responsive > .table > tfoot > tr > th,
.table-responsive > .table > thead > tr > td,
.table-responsive > .table > tbody > tr > td,
.table-responsive > .table > tfoot > tr > td {
white-space: nowrap;
}
.table-responsive > .table-bordered {
border: 0;
}
.table-responsive > .table-bordered > thead > tr > th:first-child,
.table-responsive > .table-bordered > tbody > tr > th:first-child,
.table-responsive > .table-bordered > tfoot > tr > th:first-child,
.table-responsive > .table-bordered > thead > tr > td:first-child,
.table-responsive > .table-bordered > tbody > tr > td:first-child,
.table-responsive > .table-bordered > tfoot > tr > td:first-child {
border-left: 0;
}
.table-responsive > .table-bordered > thead > tr > th:last-child,
.table-responsive > .table-bordered > tbody > tr > th:last-child,
.table-responsive > .table-bordered > tfoot > tr > th:last-child,
.table-responsive > .table-bordered > thead > tr > td:last-child,
.table-responsive > .table-bordered > tbody > tr > td:last-child,
.table-responsive > .table-bordered > tfoot > tr > td:last-child {
border-right: 0;
}
.table-responsive > .table-bordered > tbody > tr:last-child > th,
.table-responsive > .table-bordered > tfoot > tr:last-child > th,
.table-responsive > .table-bordered > tbody > tr:last-child > td,
.table-responsive > .table-bordered > tfoot > tr:last-child > td {
border-bottom: 0;
}
}
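/* Forms: normalized form elements, .form-control styling and focus
   states, sizing variants, validation feedback states, and the
   inline/horizontal form layouts. Declarations suffixed with "\9"
   are a CSS hack targeting IE9 and below. */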
fieldset {
padding: 0;
margin: 0;
border: 0;
min-width: 0;
}
legend {
display: block;
width: 100%;
padding: 0;
margin-bottom: 18px;
font-size: 19.5px;
line-height: inherit;
color: #333333;
border: 0;
border-bottom: 1px solid #e5e5e5;
}
label {
display: inline-block;
max-width: 100%;
margin-bottom: 5px;
font-weight: bold;
}
input[type="search"] {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
input[type="radio"],
input[type="checkbox"] {
margin: 4px 0 0;
margin-top: 1px \9;
line-height: normal;
}
input[type="file"] {
display: block;
}
input[type="range"] {
display: block;
width: 100%;
}
select[multiple],
select[size] {
height: auto;
}
input[type="file"]:focus,
input[type="radio"]:focus,
input[type="checkbox"]:focus {
outline: 5px auto -webkit-focus-ring-color;
outline-offset: -2px;
}
output {
display: block;
padding-top: 7px;
font-size: 13px;
line-height: 1.42857143;
color: #555555;
}
.form-control {
display: block;
width: 100%;
height: 32px;
padding: 6px 12px;
font-size: 13px;
line-height: 1.42857143;
color: #555555;
background-color: #fff;
background-image: none;
border: 1px solid #ccc;
border-radius: 2px;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-webkit-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
-o-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
}
.form-control:focus {
border-color: #66afe9;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
}
.form-control::-moz-placeholder {
color: #999;
opacity: 1;
}
.form-control:-ms-input-placeholder {
color: #999;
}
.form-control::-webkit-input-placeholder {
color: #999;
}
.form-control::-ms-expand {
border: 0;
background-color: transparent;
}
.form-control[disabled],
.form-control[readonly],
fieldset[disabled] .form-control {
background-color: #eeeeee;
opacity: 1;
}
.form-control[disabled],
fieldset[disabled] .form-control {
cursor: not-allowed;
}
textarea.form-control {
height: auto;
}
input[type="search"] {
-webkit-appearance: none;
}
@media screen and (-webkit-min-device-pixel-ratio: 0) {
input[type="date"].form-control,
input[type="time"].form-control,
input[type="datetime-local"].form-control,
input[type="month"].form-control {
line-height: 32px;
}
input[type="date"].input-sm,
input[type="time"].input-sm,
input[type="datetime-local"].input-sm,
input[type="month"].input-sm,
.input-group-sm input[type="date"],
.input-group-sm input[type="time"],
.input-group-sm input[type="datetime-local"],
.input-group-sm input[type="month"] {
line-height: 30px;
}
input[type="date"].input-lg,
input[type="time"].input-lg,
input[type="datetime-local"].input-lg,
input[type="month"].input-lg,
.input-group-lg input[type="date"],
.input-group-lg input[type="time"],
.input-group-lg input[type="datetime-local"],
.input-group-lg input[type="month"] {
line-height: 45px;
}
}
.form-group {
margin-bottom: 15px;
}
.radio,
.checkbox {
position: relative;
display: block;
margin-top: 10px;
margin-bottom: 10px;
}
.radio label,
.checkbox label {
min-height: 18px;
padding-left: 20px;
margin-bottom: 0;
font-weight: normal;
cursor: pointer;
}
.radio input[type="radio"],
.radio-inline input[type="radio"],
.checkbox input[type="checkbox"],
.checkbox-inline input[type="checkbox"] {
position: absolute;
margin-left: -20px;
margin-top: 4px \9;
}
.radio + .radio,
.checkbox + .checkbox {
margin-top: -5px;
}
.radio-inline,
.checkbox-inline {
position: relative;
display: inline-block;
padding-left: 20px;
margin-bottom: 0;
vertical-align: middle;
font-weight: normal;
cursor: pointer;
}
.radio-inline + .radio-inline,
.checkbox-inline + .checkbox-inline {
margin-top: 0;
margin-left: 10px;
}
input[type="radio"][disabled],
input[type="checkbox"][disabled],
input[type="radio"].disabled,
input[type="checkbox"].disabled,
fieldset[disabled] input[type="radio"],
fieldset[disabled] input[type="checkbox"] {
cursor: not-allowed;
}
.radio-inline.disabled,
.checkbox-inline.disabled,
fieldset[disabled] .radio-inline,
fieldset[disabled] .checkbox-inline {
cursor: not-allowed;
}
.radio.disabled label,
.checkbox.disabled label,
fieldset[disabled] .radio label,
fieldset[disabled] .checkbox label {
cursor: not-allowed;
}
.form-control-static {
padding-top: 7px;
padding-bottom: 7px;
margin-bottom: 0;
min-height: 31px;
}
.form-control-static.input-lg,
.form-control-static.input-sm {
padding-left: 0;
padding-right: 0;
}
.input-sm {
height: 30px;
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
}
select.input-sm {
height: 30px;
line-height: 30px;
}
textarea.input-sm,
select[multiple].input-sm {
height: auto;
}
.form-group-sm .form-control {
height: 30px;
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
}
.form-group-sm select.form-control {
height: 30px;
line-height: 30px;
}
.form-group-sm textarea.form-control,
.form-group-sm select[multiple].form-control {
height: auto;
}
.form-group-sm .form-control-static {
height: 30px;
min-height: 30px;
padding: 6px 10px;
font-size: 12px;
line-height: 1.5;
}
.input-lg {
height: 45px;
padding: 10px 16px;
font-size: 17px;
line-height: 1.3333333;
border-radius: 3px;
}
select.input-lg {
height: 45px;
line-height: 45px;
}
textarea.input-lg,
select[multiple].input-lg {
height: auto;
}
.form-group-lg .form-control {
height: 45px;
padding: 10px 16px;
font-size: 17px;
line-height: 1.3333333;
border-radius: 3px;
}
.form-group-lg select.form-control {
height: 45px;
line-height: 45px;
}
.form-group-lg textarea.form-control,
.form-group-lg select[multiple].form-control {
height: auto;
}
.form-group-lg .form-control-static {
height: 45px;
min-height: 35px;
padding: 11px 16px;
font-size: 17px;
line-height: 1.3333333;
}
.has-feedback {
position: relative;
}
.has-feedback .form-control {
padding-right: 40px;
}
.form-control-feedback {
position: absolute;
top: 0;
right: 0;
z-index: 2;
display: block;
width: 32px;
height: 32px;
line-height: 32px;
text-align: center;
pointer-events: none;
}
.input-lg + .form-control-feedback,
.input-group-lg + .form-control-feedback,
.form-group-lg .form-control + .form-control-feedback {
width: 45px;
height: 45px;
line-height: 45px;
}
.input-sm + .form-control-feedback,
.input-group-sm + .form-control-feedback,
.form-group-sm .form-control + .form-control-feedback {
width: 30px;
height: 30px;
line-height: 30px;
}
.has-success .help-block,
.has-success .control-label,
.has-success .radio,
.has-success .checkbox,
.has-success .radio-inline,
.has-success .checkbox-inline,
.has-success.radio label,
.has-success.checkbox label,
.has-success.radio-inline label,
.has-success.checkbox-inline label {
color: #3c763d;
}
.has-success .form-control {
border-color: #3c763d;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
}
.has-success .form-control:focus {
border-color: #2b542c;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168;
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168;
}
.has-success .input-group-addon {
color: #3c763d;
border-color: #3c763d;
background-color: #dff0d8;
}
.has-success .form-control-feedback {
color: #3c763d;
}
.has-warning .help-block,
.has-warning .control-label,
.has-warning .radio,
.has-warning .checkbox,
.has-warning .radio-inline,
.has-warning .checkbox-inline,
.has-warning.radio label,
.has-warning.checkbox label,
.has-warning.radio-inline label,
.has-warning.checkbox-inline label {
color: #8a6d3b;
}
.has-warning .form-control {
border-color: #8a6d3b;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
}
.has-warning .form-control:focus {
border-color: #66512c;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b;
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b;
}
.has-warning .input-group-addon {
color: #8a6d3b;
border-color: #8a6d3b;
background-color: #fcf8e3;
}
.has-warning .form-control-feedback {
color: #8a6d3b;
}
.has-error .help-block,
.has-error .control-label,
.has-error .radio,
.has-error .checkbox,
.has-error .radio-inline,
.has-error .checkbox-inline,
.has-error.radio label,
.has-error.checkbox label,
.has-error.radio-inline label,
.has-error.checkbox-inline label {
color: #a94442;
}
.has-error .form-control {
border-color: #a94442;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
}
.has-error .form-control:focus {
border-color: #843534;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483;
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483;
}
.has-error .input-group-addon {
color: #a94442;
border-color: #a94442;
background-color: #f2dede;
}
.has-error .form-control-feedback {
color: #a94442;
}
.has-feedback label ~ .form-control-feedback {
top: 23px;
}
.has-feedback label.sr-only ~ .form-control-feedback {
top: 0;
}
.help-block {
display: block;
margin-top: 5px;
margin-bottom: 10px;
color: #404040;
}
@media (min-width: 768px) {
.form-inline .form-group {
display: inline-block;
margin-bottom: 0;
vertical-align: middle;
}
.form-inline .form-control {
display: inline-block;
width: auto;
vertical-align: middle;
}
.form-inline .form-control-static {
display: inline-block;
}
.form-inline .input-group {
display: inline-table;
vertical-align: middle;
}
.form-inline .input-group .input-group-addon,
.form-inline .input-group .input-group-btn,
.form-inline .input-group .form-control {
width: auto;
}
.form-inline .input-group > .form-control {
width: 100%;
}
.form-inline .control-label {
margin-bottom: 0;
vertical-align: middle;
}
.form-inline .radio,
.form-inline .checkbox {
display: inline-block;
margin-top: 0;
margin-bottom: 0;
vertical-align: middle;
}
.form-inline .radio label,
.form-inline .checkbox label {
padding-left: 0;
}
.form-inline .radio input[type="radio"],
.form-inline .checkbox input[type="checkbox"] {
position: relative;
margin-left: 0;
}
.form-inline .has-feedback .form-control-feedback {
top: 0;
}
}
.form-horizontal .radio,
.form-horizontal .checkbox,
.form-horizontal .radio-inline,
.form-horizontal .checkbox-inline {
margin-top: 0;
margin-bottom: 0;
padding-top: 7px;
}
.form-horizontal .radio,
.form-horizontal .checkbox {
min-height: 25px;
}
.form-horizontal .form-group {
margin-left: 0px;
margin-right: 0px;
}
@media (min-width: 768px) {
.form-horizontal .control-label {
text-align: right;
margin-bottom: 0;
padding-top: 7px;
}
}
.form-horizontal .has-feedback .form-control-feedback {
right: 0px;
}
@media (min-width: 768px) {
.form-horizontal .form-group-lg .control-label {
padding-top: 11px;
font-size: 17px;
}
}
@media (min-width: 768px) {
.form-horizontal .form-group-sm .control-label {
padding-top: 6px;
font-size: 12px;
}
}
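/* Buttons: base .btn styles, contextual color variants (default,
   primary, success, info, warning, danger, link), size modifiers,
   and block-level buttons. */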
.btn {
display: inline-block;
margin-bottom: 0;
font-weight: normal;
text-align: center;
vertical-align: middle;
touch-action: manipulation;
cursor: pointer;
background-image: none;
border: 1px solid transparent;
white-space: nowrap;
padding: 6px 12px;
font-size: 13px;
line-height: 1.42857143;
border-radius: 2px;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.btn:focus,
.btn:active:focus,
.btn.active:focus,
.btn.focus,
.btn:active.focus,
.btn.active.focus {
outline: 5px auto -webkit-focus-ring-color;
outline-offset: -2px;
}
.btn:hover,
.btn:focus,
.btn.focus {
color: #333;
text-decoration: none;
}
.btn:active,
.btn.active {
outline: 0;
background-image: none;
-webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);
box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);
}
.btn.disabled,
.btn[disabled],
fieldset[disabled] .btn {
cursor: not-allowed;
opacity: 0.65;
filter: alpha(opacity=65);
-webkit-box-shadow: none;
box-shadow: none;
}
a.btn.disabled,
fieldset[disabled] a.btn {
pointer-events: none;
}
.btn-default {
color: #333;
background-color: #fff;
border-color: #ccc;
}
.btn-default:focus,
.btn-default.focus {
color: #333;
background-color: #e6e6e6;
border-color: #8c8c8c;
}
.btn-default:hover {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
.btn-default:active,
.btn-default.active,
.open > .dropdown-toggle.btn-default {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
.btn-default:active:hover,
.btn-default.active:hover,
.open > .dropdown-toggle.btn-default:hover,
.btn-default:active:focus,
.btn-default.active:focus,
.open > .dropdown-toggle.btn-default:focus,
.btn-default:active.focus,
.btn-default.active.focus,
.open > .dropdown-toggle.btn-default.focus {
color: #333;
background-color: #d4d4d4;
border-color: #8c8c8c;
}
.btn-default:active,
.btn-default.active,
.open > .dropdown-toggle.btn-default {
background-image: none;
}
.btn-default.disabled:hover,
.btn-default[disabled]:hover,
fieldset[disabled] .btn-default:hover,
.btn-default.disabled:focus,
.btn-default[disabled]:focus,
fieldset[disabled] .btn-default:focus,
.btn-default.disabled.focus,
.btn-default[disabled].focus,
fieldset[disabled] .btn-default.focus {
background-color: #fff;
border-color: #ccc;
}
.btn-default .badge {
color: #fff;
background-color: #333;
}
.btn-primary {
color: #fff;
background-color: #337ab7;
border-color: #2e6da4;
}
.btn-primary:focus,
.btn-primary.focus {
color: #fff;
background-color: #286090;
border-color: #122b40;
}
.btn-primary:hover {
color: #fff;
background-color: #286090;
border-color: #204d74;
}
.btn-primary:active,
.btn-primary.active,
.open > .dropdown-toggle.btn-primary {
color: #fff;
background-color: #286090;
border-color: #204d74;
}
.btn-primary:active:hover,
.btn-primary.active:hover,
.open > .dropdown-toggle.btn-primary:hover,
.btn-primary:active:focus,
.btn-primary.active:focus,
.open > .dropdown-toggle.btn-primary:focus,
.btn-primary:active.focus,
.btn-primary.active.focus,
.open > .dropdown-toggle.btn-primary.focus {
color: #fff;
background-color: #204d74;
border-color: #122b40;
}
.btn-primary:active,
.btn-primary.active,
.open > .dropdown-toggle.btn-primary {
background-image: none;
}
.btn-primary.disabled:hover,
.btn-primary[disabled]:hover,
fieldset[disabled] .btn-primary:hover,
.btn-primary.disabled:focus,
.btn-primary[disabled]:focus,
fieldset[disabled] .btn-primary:focus,
.btn-primary.disabled.focus,
.btn-primary[disabled].focus,
fieldset[disabled] .btn-primary.focus {
background-color: #337ab7;
border-color: #2e6da4;
}
.btn-primary .badge {
color: #337ab7;
background-color: #fff;
}
.btn-success {
color: #fff;
background-color: #5cb85c;
border-color: #4cae4c;
}
.btn-success:focus,
.btn-success.focus {
color: #fff;
background-color: #449d44;
border-color: #255625;
}
.btn-success:hover {
color: #fff;
background-color: #449d44;
border-color: #398439;
}
.btn-success:active,
.btn-success.active,
.open > .dropdown-toggle.btn-success {
color: #fff;
background-color: #449d44;
border-color: #398439;
}
.btn-success:active:hover,
.btn-success.active:hover,
.open > .dropdown-toggle.btn-success:hover,
.btn-success:active:focus,
.btn-success.active:focus,
.open > .dropdown-toggle.btn-success:focus,
.btn-success:active.focus,
.btn-success.active.focus,
.open > .dropdown-toggle.btn-success.focus {
color: #fff;
background-color: #398439;
border-color: #255625;
}
.btn-success:active,
.btn-success.active,
.open > .dropdown-toggle.btn-success {
background-image: none;
}
.btn-success.disabled:hover,
.btn-success[disabled]:hover,
fieldset[disabled] .btn-success:hover,
.btn-success.disabled:focus,
.btn-success[disabled]:focus,
fieldset[disabled] .btn-success:focus,
.btn-success.disabled.focus,
.btn-success[disabled].focus,
fieldset[disabled] .btn-success.focus {
background-color: #5cb85c;
border-color: #4cae4c;
}
.btn-success .badge {
color: #5cb85c;
background-color: #fff;
}
.btn-info {
color: #fff;
background-color: #5bc0de;
border-color: #46b8da;
}
.btn-info:focus,
.btn-info.focus {
color: #fff;
background-color: #31b0d5;
border-color: #1b6d85;
}
.btn-info:hover {
color: #fff;
background-color: #31b0d5;
border-color: #269abc;
}
.btn-info:active,
.btn-info.active,
.open > .dropdown-toggle.btn-info {
color: #fff;
background-color: #31b0d5;
border-color: #269abc;
}
.btn-info:active:hover,
.btn-info.active:hover,
.open > .dropdown-toggle.btn-info:hover,
.btn-info:active:focus,
.btn-info.active:focus,
.open > .dropdown-toggle.btn-info:focus,
.btn-info:active.focus,
.btn-info.active.focus,
.open > .dropdown-toggle.btn-info.focus {
color: #fff;
background-color: #269abc;
border-color: #1b6d85;
}
.btn-info:active,
.btn-info.active,
.open > .dropdown-toggle.btn-info {
background-image: none;
}
.btn-info.disabled:hover,
.btn-info[disabled]:hover,
fieldset[disabled] .btn-info:hover,
.btn-info.disabled:focus,
.btn-info[disabled]:focus,
fieldset[disabled] .btn-info:focus,
.btn-info.disabled.focus,
.btn-info[disabled].focus,
fieldset[disabled] .btn-info.focus {
background-color: #5bc0de;
border-color: #46b8da;
}
.btn-info .badge {
color: #5bc0de;
background-color: #fff;
}
.btn-warning {
color: #fff;
background-color: #f0ad4e;
border-color: #eea236;
}
.btn-warning:focus,
.btn-warning.focus {
color: #fff;
background-color: #ec971f;
border-color: #985f0d;
}
.btn-warning:hover {
color: #fff;
background-color: #ec971f;
border-color: #d58512;
}
.btn-warning:active,
.btn-warning.active,
.open > .dropdown-toggle.btn-warning {
color: #fff;
background-color: #ec971f;
border-color: #d58512;
}
.btn-warning:active:hover,
.btn-warning.active:hover,
.open > .dropdown-toggle.btn-warning:hover,
.btn-warning:active:focus,
.btn-warning.active:focus,
.open > .dropdown-toggle.btn-warning:focus,
.btn-warning:active.focus,
.btn-warning.active.focus,
.open > .dropdown-toggle.btn-warning.focus {
color: #fff;
background-color: #d58512;
border-color: #985f0d;
}
.btn-warning:active,
.btn-warning.active,
.open > .dropdown-toggle.btn-warning {
background-image: none;
}
.btn-warning.disabled:hover,
.btn-warning[disabled]:hover,
fieldset[disabled] .btn-warning:hover,
.btn-warning.disabled:focus,
.btn-warning[disabled]:focus,
fieldset[disabled] .btn-warning:focus,
.btn-warning.disabled.focus,
.btn-warning[disabled].focus,
fieldset[disabled] .btn-warning.focus {
background-color: #f0ad4e;
border-color: #eea236;
}
.btn-warning .badge {
color: #f0ad4e;
background-color: #fff;
}
.btn-danger {
color: #fff;
background-color: #d9534f;
border-color: #d43f3a;
}
.btn-danger:focus,
.btn-danger.focus {
color: #fff;
background-color: #c9302c;
border-color: #761c19;
}
.btn-danger:hover {
color: #fff;
background-color: #c9302c;
border-color: #ac2925;
}
.btn-danger:active,
.btn-danger.active,
.open > .dropdown-toggle.btn-danger {
color: #fff;
background-color: #c9302c;
border-color: #ac2925;
}
.btn-danger:active:hover,
.btn-danger.active:hover,
.open > .dropdown-toggle.btn-danger:hover,
.btn-danger:active:focus,
.btn-danger.active:focus,
.open > .dropdown-toggle.btn-danger:focus,
.btn-danger:active.focus,
.btn-danger.active.focus,
.open > .dropdown-toggle.btn-danger.focus {
color: #fff;
background-color: #ac2925;
border-color: #761c19;
}
.btn-danger:active,
.btn-danger.active,
.open > .dropdown-toggle.btn-danger {
background-image: none;
}
.btn-danger.disabled:hover,
.btn-danger[disabled]:hover,
fieldset[disabled] .btn-danger:hover,
.btn-danger.disabled:focus,
.btn-danger[disabled]:focus,
fieldset[disabled] .btn-danger:focus,
.btn-danger.disabled.focus,
.btn-danger[disabled].focus,
fieldset[disabled] .btn-danger.focus {
background-color: #d9534f;
border-color: #d43f3a;
}
.btn-danger .badge {
color: #d9534f;
background-color: #fff;
}
.btn-link {
color: #337ab7;
font-weight: normal;
border-radius: 0;
}
.btn-link,
.btn-link:active,
.btn-link.active,
.btn-link[disabled],
fieldset[disabled] .btn-link {
background-color: transparent;
-webkit-box-shadow: none;
box-shadow: none;
}
.btn-link,
.btn-link:hover,
.btn-link:focus,
.btn-link:active {
border-color: transparent;
}
.btn-link:hover,
.btn-link:focus {
color: #23527c;
text-decoration: underline;
background-color: transparent;
}
.btn-link[disabled]:hover,
fieldset[disabled] .btn-link:hover,
.btn-link[disabled]:focus,
fieldset[disabled] .btn-link:focus {
color: #777777;
text-decoration: none;
}
.btn-lg,
.btn-group-lg > .btn {
padding: 10px 16px;
font-size: 17px;
line-height: 1.3333333;
border-radius: 3px;
}
.btn-sm,
.btn-group-sm > .btn {
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
}
.btn-xs,
.btn-group-xs > .btn {
padding: 1px 5px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
}
.btn-block {
display: block;
width: 100%;
}
.btn-block + .btn-block {
margin-top: 5px;
}
input[type="submit"].btn-block,
input[type="reset"].btn-block,
input[type="button"].btn-block {
width: 100%;
}
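/* Component animations: opacity-based .fade and height-based
   .collapse/.collapsing transitions toggled by JavaScript plugins. */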
.fade {
opacity: 0;
-webkit-transition: opacity 0.15s linear;
-o-transition: opacity 0.15s linear;
transition: opacity 0.15s linear;
}
.fade.in {
opacity: 1;
}
.collapse {
display: none;
}
.collapse.in {
display: block;
}
tr.collapse.in {
display: table-row;
}
tbody.collapse.in {
display: table-row-group;
}
.collapsing {
position: relative;
height: 0;
overflow: hidden;
-webkit-transition-property: height, visibility;
transition-property: height, visibility;
-webkit-transition-duration: 0.35s;
transition-duration: 0.35s;
-webkit-transition-timing-function: ease;
transition-timing-function: ease;
}
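/* Dropdowns: caret indicator, the absolutely positioned .dropdown-menu,
   item hover/active/disabled states, and dropup variants. The duplicated
   caret border declarations (dashed, then "solid \9") give IE9 and below
   a solid-border fallback for the triangle. */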
.caret {
display: inline-block;
width: 0;
height: 0;
margin-left: 2px;
vertical-align: middle;
border-top: 4px dashed;
border-top: 4px solid \9;
border-right: 4px solid transparent;
border-left: 4px solid transparent;
}
.dropup,
.dropdown {
position: relative;
}
.dropdown-toggle:focus {
outline: 0;
}
.dropdown-menu {
position: absolute;
top: 100%;
left: 0;
z-index: 1000;
display: none;
float: left;
min-width: 160px;
padding: 5px 0;
margin: 2px 0 0;
list-style: none;
font-size: 13px;
text-align: left;
background-color: #fff;
border: 1px solid #ccc;
border: 1px solid rgba(0, 0, 0, 0.15);
border-radius: 2px;
-webkit-box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175);
box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175);
background-clip: padding-box;
}
.dropdown-menu.pull-right {
right: 0;
left: auto;
}
.dropdown-menu .divider {
height: 1px;
margin: 8px 0;
overflow: hidden;
background-color: #e5e5e5;
}
.dropdown-menu > li > a {
display: block;
padding: 3px 20px;
clear: both;
font-weight: normal;
line-height: 1.42857143;
color: #333333;
white-space: nowrap;
}
.dropdown-menu > li > a:hover,
.dropdown-menu > li > a:focus {
text-decoration: none;
color: #262626;
background-color: #f5f5f5;
}
.dropdown-menu > .active > a,
.dropdown-menu > .active > a:hover,
.dropdown-menu > .active > a:focus {
color: #fff;
text-decoration: none;
outline: 0;
background-color: #337ab7;
}
.dropdown-menu > .disabled > a,
.dropdown-menu > .disabled > a:hover,
.dropdown-menu > .disabled > a:focus {
color: #777777;
}
.dropdown-menu > .disabled > a:hover,
.dropdown-menu > .disabled > a:focus {
text-decoration: none;
background-color: transparent;
background-image: none;
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
cursor: not-allowed;
}
.open > .dropdown-menu {
display: block;
}
.open > a {
outline: 0;
}
.dropdown-menu-right {
left: auto;
right: 0;
}
.dropdown-menu-left {
left: 0;
right: auto;
}
.dropdown-header {
display: block;
padding: 3px 20px;
font-size: 12px;
line-height: 1.42857143;
color: #777777;
white-space: nowrap;
}
.dropdown-backdrop {
position: fixed;
left: 0;
right: 0;
bottom: 0;
top: 0;
z-index: 990;
}
.pull-right > .dropdown-menu {
right: 0;
left: auto;
}
.dropup .caret,
.navbar-fixed-bottom .dropdown .caret {
border-top: 0;
border-bottom: 4px dashed;
border-bottom: 4px solid \9;
content: "";
}
.dropup .dropdown-menu,
.navbar-fixed-bottom .dropdown .dropdown-menu {
top: auto;
bottom: 100%;
margin-bottom: 2px;
}
@media (min-width: 541px) {
.navbar-right .dropdown-menu {
left: auto;
right: 0;
}
.navbar-right .dropdown-menu-left {
left: 0;
right: auto;
}
}
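/* Button groups: horizontal, vertical, and justified groupings of
   buttons, with -1px margins to collapse adjoining borders and
   corner-radius adjustments on first/last children. */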
.btn-group,
.btn-group-vertical {
position: relative;
display: inline-block;
vertical-align: middle;
}
.btn-group > .btn,
.btn-group-vertical > .btn {
position: relative;
float: left;
}
.btn-group > .btn:hover,
.btn-group-vertical > .btn:hover,
.btn-group > .btn:focus,
.btn-group-vertical > .btn:focus,
.btn-group > .btn:active,
.btn-group-vertical > .btn:active,
.btn-group > .btn.active,
.btn-group-vertical > .btn.active {
z-index: 2;
}
.btn-group .btn + .btn,
.btn-group .btn + .btn-group,
.btn-group .btn-group + .btn,
.btn-group .btn-group + .btn-group {
margin-left: -1px;
}
.btn-toolbar {
margin-left: -5px;
}
.btn-toolbar .btn,
.btn-toolbar .btn-group,
.btn-toolbar .input-group {
float: left;
}
.btn-toolbar > .btn,
.btn-toolbar > .btn-group,
.btn-toolbar > .input-group {
margin-left: 5px;
}
.btn-group > .btn:not(:first-child):not(:last-child):not(.dropdown-toggle) {
border-radius: 0;
}
.btn-group > .btn:first-child {
margin-left: 0;
}
.btn-group > .btn:first-child:not(:last-child):not(.dropdown-toggle) {
border-bottom-right-radius: 0;
border-top-right-radius: 0;
}
.btn-group > .btn:last-child:not(:first-child),
.btn-group > .dropdown-toggle:not(:first-child) {
border-bottom-left-radius: 0;
border-top-left-radius: 0;
}
.btn-group > .btn-group {
float: left;
}
.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn {
border-radius: 0;
}
.btn-group > .btn-group:first-child:not(:last-child) > .btn:last-child,
.btn-group > .btn-group:first-child:not(:last-child) > .dropdown-toggle {
border-bottom-right-radius: 0;
border-top-right-radius: 0;
}
.btn-group > .btn-group:last-child:not(:first-child) > .btn:first-child {
border-bottom-left-radius: 0;
border-top-left-radius: 0;
}
.btn-group .dropdown-toggle:active,
.btn-group.open .dropdown-toggle {
outline: 0;
}
.btn-group > .btn + .dropdown-toggle {
padding-left: 8px;
padding-right: 8px;
}
.btn-group > .btn-lg + .dropdown-toggle {
padding-left: 12px;
padding-right: 12px;
}
.btn-group.open .dropdown-toggle {
-webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);
box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);
}
.btn-group.open .dropdown-toggle.btn-link {
-webkit-box-shadow: none;
box-shadow: none;
}
.btn .caret {
margin-left: 0;
}
.btn-lg .caret {
border-width: 5px 5px 0;
border-bottom-width: 0;
}
.dropup .btn-lg .caret {
border-width: 0 5px 5px;
}
.btn-group-vertical > .btn,
.btn-group-vertical > .btn-group,
.btn-group-vertical > .btn-group > .btn {
display: block;
float: none;
width: 100%;
max-width: 100%;
}
.btn-group-vertical > .btn-group > .btn {
float: none;
}
.btn-group-vertical > .btn + .btn,
.btn-group-vertical > .btn + .btn-group,
.btn-group-vertical > .btn-group + .btn,
.btn-group-vertical > .btn-group + .btn-group {
margin-top: -1px;
margin-left: 0;
}
.btn-group-vertical > .btn:not(:first-child):not(:last-child) {
border-radius: 0;
}
.btn-group-vertical > .btn:first-child:not(:last-child) {
border-top-right-radius: 2px;
border-top-left-radius: 2px;
border-bottom-right-radius: 0;
border-bottom-left-radius: 0;
}
.btn-group-vertical > .btn:last-child:not(:first-child) {
border-top-right-radius: 0;
border-top-left-radius: 0;
border-bottom-right-radius: 2px;
border-bottom-left-radius: 2px;
}
.btn-group-vertical > .btn-group:not(:first-child):not(:last-child) > .btn {
border-radius: 0;
}
.btn-group-vertical > .btn-group:first-child:not(:last-child) > .btn:last-child,
.btn-group-vertical > .btn-group:first-child:not(:last-child) > .dropdown-toggle {
border-bottom-right-radius: 0;
border-bottom-left-radius: 0;
}
.btn-group-vertical > .btn-group:last-child:not(:first-child) > .btn:first-child {
border-top-right-radius: 0;
border-top-left-radius: 0;
}
.btn-group-justified {
display: table;
width: 100%;
table-layout: fixed;
border-collapse: separate;
}
.btn-group-justified > .btn,
.btn-group-justified > .btn-group {
float: none;
display: table-cell;
width: 1%;
}
.btn-group-justified > .btn-group .btn {
width: 100%;
}
.btn-group-justified > .btn-group .dropdown-menu {
left: auto;
}
[data-toggle="buttons"] > .btn input[type="radio"],
[data-toggle="buttons"] > .btn-group > .btn input[type="radio"],
[data-toggle="buttons"] > .btn input[type="checkbox"],
[data-toggle="buttons"] > .btn-group > .btn input[type="checkbox"] {
position: absolute;
clip: rect(0, 0, 0, 0);
pointer-events: none;
}
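/* Input groups: text addons and button segments attached directly
   to form controls via table-cell layout, with matching lg/sm
   size variants. */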
.input-group {
position: relative;
display: table;
border-collapse: separate;
}
.input-group[class*="col-"] {
float: none;
padding-left: 0;
padding-right: 0;
}
.input-group .form-control {
position: relative;
z-index: 2;
float: left;
width: 100%;
margin-bottom: 0;
}
.input-group .form-control:focus {
z-index: 3;
}
.input-group-lg > .form-control,
.input-group-lg > .input-group-addon,
.input-group-lg > .input-group-btn > .btn {
height: 45px;
padding: 10px 16px;
font-size: 17px;
line-height: 1.3333333;
border-radius: 3px;
}
select.input-group-lg > .form-control,
select.input-group-lg > .input-group-addon,
select.input-group-lg > .input-group-btn > .btn {
height: 45px;
line-height: 45px;
}
textarea.input-group-lg > .form-control,
textarea.input-group-lg > .input-group-addon,
textarea.input-group-lg > .input-group-btn > .btn,
select[multiple].input-group-lg > .form-control,
select[multiple].input-group-lg > .input-group-addon,
select[multiple].input-group-lg > .input-group-btn > .btn {
height: auto;
}
.input-group-sm > .form-control,
.input-group-sm > .input-group-addon,
.input-group-sm > .input-group-btn > .btn {
height: 30px;
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
}
select.input-group-sm > .form-control,
select.input-group-sm > .input-group-addon,
select.input-group-sm > .input-group-btn > .btn {
height: 30px;
line-height: 30px;
}
textarea.input-group-sm > .form-control,
textarea.input-group-sm > .input-group-addon,
textarea.input-group-sm > .input-group-btn > .btn,
select[multiple].input-group-sm > .form-control,
select[multiple].input-group-sm > .input-group-addon,
select[multiple].input-group-sm > .input-group-btn > .btn {
height: auto;
}
.input-group-addon,
.input-group-btn,
.input-group .form-control {
display: table-cell;
}
.input-group-addon:not(:first-child):not(:last-child),
.input-group-btn:not(:first-child):not(:last-child),
.input-group .form-control:not(:first-child):not(:last-child) {
border-radius: 0;
}
.input-group-addon,
.input-group-btn {
width: 1%;
white-space: nowrap;
vertical-align: middle;
}
.input-group-addon {
padding: 6px 12px;
font-size: 13px;
font-weight: normal;
line-height: 1;
color: #555555;
text-align: center;
background-color: #eeeeee;
border: 1px solid #ccc;
border-radius: 2px;
}
.input-group-addon.input-sm {
padding: 5px 10px;
font-size: 12px;
border-radius: 1px;
}
.input-group-addon.input-lg {
padding: 10px 16px;
font-size: 17px;
border-radius: 3px;
}
.input-group-addon input[type="radio"],
.input-group-addon input[type="checkbox"] {
margin-top: 0;
}
.input-group .form-control:first-child,
.input-group-addon:first-child,
.input-group-btn:first-child > .btn,
.input-group-btn:first-child > .btn-group > .btn,
.input-group-btn:first-child > .dropdown-toggle,
.input-group-btn:last-child > .btn:not(:last-child):not(.dropdown-toggle),
.input-group-btn:last-child > .btn-group:not(:last-child) > .btn {
border-bottom-right-radius: 0;
border-top-right-radius: 0;
}
.input-group-addon:first-child {
border-right: 0;
}
.input-group .form-control:last-child,
.input-group-addon:last-child,
.input-group-btn:last-child > .btn,
.input-group-btn:last-child > .btn-group > .btn,
.input-group-btn:last-child > .dropdown-toggle,
.input-group-btn:first-child > .btn:not(:first-child),
.input-group-btn:first-child > .btn-group:not(:first-child) > .btn {
border-bottom-left-radius: 0;
border-top-left-radius: 0;
}
.input-group-addon:last-child {
border-left: 0;
}
.input-group-btn {
position: relative;
font-size: 0;
white-space: nowrap;
}
.input-group-btn > .btn {
position: relative;
}
.input-group-btn > .btn + .btn {
margin-left: -1px;
}
.input-group-btn > .btn:hover,
.input-group-btn > .btn:focus,
.input-group-btn > .btn:active {
z-index: 2;
}
.input-group-btn:first-child > .btn,
.input-group-btn:first-child > .btn-group {
margin-right: -1px;
}
.input-group-btn:last-child > .btn,
.input-group-btn:last-child > .btn-group {
z-index: 2;
margin-left: -1px;
}
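/* Navs: base .nav list, tab and pill variants, and stacked/justified
   layouts, plus tab-pane visibility for tabbed content. */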
.nav {
margin-bottom: 0;
padding-left: 0;
list-style: none;
}
.nav > li {
position: relative;
display: block;
}
.nav > li > a {
position: relative;
display: block;
padding: 10px 15px;
}
.nav > li > a:hover,
.nav > li > a:focus {
text-decoration: none;
background-color: #eeeeee;
}
.nav > li.disabled > a {
color: #777777;
}
.nav > li.disabled > a:hover,
.nav > li.disabled > a:focus {
color: #777777;
text-decoration: none;
background-color: transparent;
cursor: not-allowed;
}
.nav .open > a,
.nav .open > a:hover,
.nav .open > a:focus {
background-color: #eeeeee;
border-color: #337ab7;
}
.nav .nav-divider {
height: 1px;
margin: 8px 0;
overflow: hidden;
background-color: #e5e5e5;
}
.nav > li > a > img {
max-width: none;
}
.nav-tabs {
border-bottom: 1px solid #ddd;
}
.nav-tabs > li {
float: left;
margin-bottom: -1px;
}
.nav-tabs > li > a {
margin-right: 2px;
line-height: 1.42857143;
border: 1px solid transparent;
border-radius: 2px 2px 0 0;
}
.nav-tabs > li > a:hover {
border-color: #eeeeee #eeeeee #ddd;
}
.nav-tabs > li.active > a,
.nav-tabs > li.active > a:hover,
.nav-tabs > li.active > a:focus {
color: #555555;
background-color: #fff;
border: 1px solid #ddd;
border-bottom-color: transparent;
cursor: default;
}
.nav-tabs.nav-justified {
width: 100%;
border-bottom: 0;
}
.nav-tabs.nav-justified > li {
float: none;
}
.nav-tabs.nav-justified > li > a {
text-align: center;
margin-bottom: 5px;
}
.nav-tabs.nav-justified > .dropdown .dropdown-menu {
top: auto;
left: auto;
}
@media (min-width: 768px) {
.nav-tabs.nav-justified > li {
display: table-cell;
width: 1%;
}
.nav-tabs.nav-justified > li > a {
margin-bottom: 0;
}
}
.nav-tabs.nav-justified > li > a {
margin-right: 0;
border-radius: 2px;
}
.nav-tabs.nav-justified > .active > a,
.nav-tabs.nav-justified > .active > a:hover,
.nav-tabs.nav-justified > .active > a:focus {
border: 1px solid #ddd;
}
@media (min-width: 768px) {
.nav-tabs.nav-justified > li > a {
border-bottom: 1px solid #ddd;
border-radius: 2px 2px 0 0;
}
.nav-tabs.nav-justified > .active > a,
.nav-tabs.nav-justified > .active > a:hover,
.nav-tabs.nav-justified > .active > a:focus {
border-bottom-color: #fff;
}
}
.nav-pills > li {
float: left;
}
.nav-pills > li > a {
border-radius: 2px;
}
.nav-pills > li + li {
margin-left: 2px;
}
.nav-pills > li.active > a,
.nav-pills > li.active > a:hover,
.nav-pills > li.active > a:focus {
color: #fff;
background-color: #337ab7;
}
.nav-stacked > li {
float: none;
}
.nav-stacked > li + li {
margin-top: 2px;
margin-left: 0;
}
.nav-justified {
width: 100%;
}
.nav-justified > li {
float: none;
}
.nav-justified > li > a {
text-align: center;
margin-bottom: 5px;
}
.nav-justified > .dropdown .dropdown-menu {
top: auto;
left: auto;
}
@media (min-width: 768px) {
.nav-justified > li {
display: table-cell;
width: 1%;
}
.nav-justified > li > a {
margin-bottom: 0;
}
}
.nav-tabs-justified {
border-bottom: 0;
}
.nav-tabs-justified > li > a {
margin-right: 0;
border-radius: 2px;
}
.nav-tabs-justified > .active > a,
.nav-tabs-justified > .active > a:hover,
.nav-tabs-justified > .active > a:focus {
border: 1px solid #ddd;
}
@media (min-width: 768px) {
.nav-tabs-justified > li > a {
border-bottom: 1px solid #ddd;
border-radius: 2px 2px 0 0;
}
.nav-tabs-justified > .active > a,
.nav-tabs-justified > .active > a:hover,
.nav-tabs-justified > .active > a:focus {
border-bottom-color: #fff;
}
}
.tab-content > .tab-pane {
display: none;
}
.tab-content > .active {
display: block;
}
.nav-tabs .dropdown-menu {
margin-top: -1px;
border-top-right-radius: 0;
border-top-left-radius: 0;
}
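/* Navbar: collapses below the 541px breakpoint used throughout this build. */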
.navbar {
position: relative;
min-height: 30px;
margin-bottom: 18px;
border: 1px solid transparent;
}
@media (min-width: 541px) {
.navbar {
border-radius: 2px;
}
}
@media (min-width: 541px) {
.navbar-header {
float: left;
}
}
.navbar-collapse {
overflow-x: visible;
padding-right: 0px;
padding-left: 0px;
border-top: 1px solid transparent;
box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1);
-webkit-overflow-scrolling: touch;
}
.navbar-collapse.in {
overflow-y: auto;
}
@media (min-width: 541px) {
.navbar-collapse {
width: auto;
border-top: 0;
box-shadow: none;
}
.navbar-collapse.collapse {
display: block !important;
height: auto !important;
padding-bottom: 0;
overflow: visible !important;
}
.navbar-collapse.in {
overflow-y: visible;
}
.navbar-fixed-top .navbar-collapse,
.navbar-static-top .navbar-collapse,
.navbar-fixed-bottom .navbar-collapse {
padding-left: 0;
padding-right: 0;
}
}
.navbar-fixed-top .navbar-collapse,
.navbar-fixed-bottom .navbar-collapse {
max-height: 340px;
}
@media (max-device-width: 540px) and (orientation: landscape) {
.navbar-fixed-top .navbar-collapse,
.navbar-fixed-bottom .navbar-collapse {
max-height: 200px;
}
}
.container > .navbar-header,
.container-fluid > .navbar-header,
.container > .navbar-collapse,
.container-fluid > .navbar-collapse {
margin-right: 0px;
margin-left: 0px;
}
@media (min-width: 541px) {
.container > .navbar-header,
.container-fluid > .navbar-header,
.container > .navbar-collapse,
.container-fluid > .navbar-collapse {
margin-right: 0;
margin-left: 0;
}
}
.navbar-static-top {
z-index: 1000;
border-width: 0 0 1px;
}
@media (min-width: 541px) {
.navbar-static-top {
border-radius: 0;
}
}
.navbar-fixed-top,
.navbar-fixed-bottom {
position: fixed;
right: 0;
left: 0;
z-index: 1030;
}
@media (min-width: 541px) {
.navbar-fixed-top,
.navbar-fixed-bottom {
border-radius: 0;
}
}
.navbar-fixed-top {
top: 0;
border-width: 0 0 1px;
}
.navbar-fixed-bottom {
bottom: 0;
margin-bottom: 0;
border-width: 1px 0 0;
}
.navbar-brand {
float: left;
padding: 6px 0px;
font-size: 17px;
line-height: 18px;
height: 30px;
}
.navbar-brand:hover,
.navbar-brand:focus {
text-decoration: none;
}
.navbar-brand > img {
display: block;
}
@media (min-width: 541px) {
.navbar > .container .navbar-brand,
.navbar > .container-fluid .navbar-brand {
margin-left: 0px;
}
}
.navbar-toggle {
position: relative;
float: right;
margin-right: 0px;
padding: 9px 10px;
margin-top: -2px;
margin-bottom: -2px;
background-color: transparent;
background-image: none;
border: 1px solid transparent;
border-radius: 2px;
}
.navbar-toggle:focus {
outline: 0;
}
.navbar-toggle .icon-bar {
display: block;
width: 22px;
height: 2px;
border-radius: 1px;
}
.navbar-toggle .icon-bar + .icon-bar {
margin-top: 4px;
}
@media (min-width: 541px) {
.navbar-toggle {
display: none;
}
}
.navbar-nav {
margin: 3px 0px;
}
.navbar-nav > li > a {
padding-top: 10px;
padding-bottom: 10px;
line-height: 18px;
}
@media (max-width: 540px) {
.navbar-nav .open .dropdown-menu {
position: static;
float: none;
width: auto;
margin-top: 0;
background-color: transparent;
border: 0;
box-shadow: none;
}
.navbar-nav .open .dropdown-menu > li > a,
.navbar-nav .open .dropdown-menu .dropdown-header {
padding: 5px 15px 5px 25px;
}
.navbar-nav .open .dropdown-menu > li > a {
line-height: 18px;
}
.navbar-nav .open .dropdown-menu > li > a:hover,
.navbar-nav .open .dropdown-menu > li > a:focus {
background-image: none;
}
}
@media (min-width: 541px) {
.navbar-nav {
float: left;
margin: 0;
}
.navbar-nav > li {
float: left;
}
.navbar-nav > li > a {
padding-top: 6px;
padding-bottom: 6px;
}
}
.navbar-form {
margin-left: 0px;
margin-right: 0px;
padding: 10px 0px;
border-top: 1px solid transparent;
border-bottom: 1px solid transparent;
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1);
box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1);
margin-top: -1px;
margin-bottom: -1px;
}
@media (min-width: 768px) {
.navbar-form .form-group {
display: inline-block;
margin-bottom: 0;
vertical-align: middle;
}
.navbar-form .form-control {
display: inline-block;
width: auto;
vertical-align: middle;
}
.navbar-form .form-control-static {
display: inline-block;
}
.navbar-form .input-group {
display: inline-table;
vertical-align: middle;
}
.navbar-form .input-group .input-group-addon,
.navbar-form .input-group .input-group-btn,
.navbar-form .input-group .form-control {
width: auto;
}
.navbar-form .input-group > .form-control {
width: 100%;
}
.navbar-form .control-label {
margin-bottom: 0;
vertical-align: middle;
}
.navbar-form .radio,
.navbar-form .checkbox {
display: inline-block;
margin-top: 0;
margin-bottom: 0;
vertical-align: middle;
}
.navbar-form .radio label,
.navbar-form .checkbox label {
padding-left: 0;
}
.navbar-form .radio input[type="radio"],
.navbar-form .checkbox input[type="checkbox"] {
position: relative;
margin-left: 0;
}
.navbar-form .has-feedback .form-control-feedback {
top: 0;
}
}
@media (max-width: 540px) {
.navbar-form .form-group {
margin-bottom: 5px;
}
.navbar-form .form-group:last-child {
margin-bottom: 0;
}
}
@media (min-width: 541px) {
.navbar-form {
width: auto;
border: 0;
margin-left: 0;
margin-right: 0;
padding-top: 0;
padding-bottom: 0;
-webkit-box-shadow: none;
box-shadow: none;
}
}
.navbar-nav > li > .dropdown-menu {
margin-top: 0;
border-top-right-radius: 0;
border-top-left-radius: 0;
}
.navbar-fixed-bottom .navbar-nav > li > .dropdown-menu {
margin-bottom: 0;
border-top-right-radius: 2px;
border-top-left-radius: 2px;
border-bottom-right-radius: 0;
border-bottom-left-radius: 0;
}
.navbar-btn {
margin-top: -1px;
margin-bottom: -1px;
}
.navbar-btn.btn-sm {
margin-top: 0px;
margin-bottom: 0px;
}
.navbar-btn.btn-xs {
margin-top: 4px;
margin-bottom: 4px;
}
.navbar-text {
margin-top: 6px;
margin-bottom: 6px;
}
@media (min-width: 541px) {
.navbar-text {
float: left;
margin-left: 0px;
margin-right: 0px;
}
}
@media (min-width: 541px) {
  .navbar-left {
    float: left !important;
  }
  .navbar-right {
    float: right !important;
    margin-right: 0px;
  }
.navbar-right ~ .navbar-right {
margin-right: 0;
}
}
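/* Navbar themes: .navbar-default (light) and .navbar-inverse (dark). */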
.navbar-default {
background-color: #f8f8f8;
border-color: #e7e7e7;
}
.navbar-default .navbar-brand {
color: #777;
}
.navbar-default .navbar-brand:hover,
.navbar-default .navbar-brand:focus {
color: #5e5e5e;
background-color: transparent;
}
.navbar-default .navbar-text {
color: #777;
}
.navbar-default .navbar-nav > li > a {
color: #777;
}
.navbar-default .navbar-nav > li > a:hover,
.navbar-default .navbar-nav > li > a:focus {
color: #333;
background-color: transparent;
}
.navbar-default .navbar-nav > .active > a,
.navbar-default .navbar-nav > .active > a:hover,
.navbar-default .navbar-nav > .active > a:focus {
color: #555;
background-color: #e7e7e7;
}
.navbar-default .navbar-nav > .disabled > a,
.navbar-default .navbar-nav > .disabled > a:hover,
.navbar-default .navbar-nav > .disabled > a:focus {
color: #ccc;
background-color: transparent;
}
.navbar-default .navbar-toggle {
border-color: #ddd;
}
.navbar-default .navbar-toggle:hover,
.navbar-default .navbar-toggle:focus {
background-color: #ddd;
}
.navbar-default .navbar-toggle .icon-bar {
background-color: #888;
}
.navbar-default .navbar-collapse,
.navbar-default .navbar-form {
border-color: #e7e7e7;
}
.navbar-default .navbar-nav > .open > a,
.navbar-default .navbar-nav > .open > a:hover,
.navbar-default .navbar-nav > .open > a:focus {
background-color: #e7e7e7;
color: #555;
}
@media (max-width: 540px) {
.navbar-default .navbar-nav .open .dropdown-menu > li > a {
color: #777;
}
.navbar-default .navbar-nav .open .dropdown-menu > li > a:hover,
.navbar-default .navbar-nav .open .dropdown-menu > li > a:focus {
color: #333;
background-color: transparent;
}
.navbar-default .navbar-nav .open .dropdown-menu > .active > a,
.navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover,
.navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus {
color: #555;
background-color: #e7e7e7;
}
.navbar-default .navbar-nav .open .dropdown-menu > .disabled > a,
.navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:hover,
.navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:focus {
color: #ccc;
background-color: transparent;
}
}
.navbar-default .navbar-link {
color: #777;
}
.navbar-default .navbar-link:hover {
color: #333;
}
.navbar-default .btn-link {
color: #777;
}
.navbar-default .btn-link:hover,
.navbar-default .btn-link:focus {
color: #333;
}
.navbar-default .btn-link[disabled]:hover,
fieldset[disabled] .navbar-default .btn-link:hover,
.navbar-default .btn-link[disabled]:focus,
fieldset[disabled] .navbar-default .btn-link:focus {
color: #ccc;
}
.navbar-inverse {
background-color: #222;
border-color: #080808;
}
.navbar-inverse .navbar-brand {
color: #9d9d9d;
}
.navbar-inverse .navbar-brand:hover,
.navbar-inverse .navbar-brand:focus {
color: #fff;
background-color: transparent;
}
.navbar-inverse .navbar-text {
color: #9d9d9d;
}
.navbar-inverse .navbar-nav > li > a {
color: #9d9d9d;
}
.navbar-inverse .navbar-nav > li > a:hover,
.navbar-inverse .navbar-nav > li > a:focus {
color: #fff;
background-color: transparent;
}
.navbar-inverse .navbar-nav > .active > a,
.navbar-inverse .navbar-nav > .active > a:hover,
.navbar-inverse .navbar-nav > .active > a:focus {
color: #fff;
background-color: #080808;
}
.navbar-inverse .navbar-nav > .disabled > a,
.navbar-inverse .navbar-nav > .disabled > a:hover,
.navbar-inverse .navbar-nav > .disabled > a:focus {
color: #444;
background-color: transparent;
}
.navbar-inverse .navbar-toggle {
border-color: #333;
}
.navbar-inverse .navbar-toggle:hover,
.navbar-inverse .navbar-toggle:focus {
background-color: #333;
}
.navbar-inverse .navbar-toggle .icon-bar {
background-color: #fff;
}
.navbar-inverse .navbar-collapse,
.navbar-inverse .navbar-form {
border-color: #101010;
}
.navbar-inverse .navbar-nav > .open > a,
.navbar-inverse .navbar-nav > .open > a:hover,
.navbar-inverse .navbar-nav > .open > a:focus {
background-color: #080808;
color: #fff;
}
@media (max-width: 540px) {
.navbar-inverse .navbar-nav .open .dropdown-menu > .dropdown-header {
border-color: #080808;
}
.navbar-inverse .navbar-nav .open .dropdown-menu .divider {
background-color: #080808;
}
.navbar-inverse .navbar-nav .open .dropdown-menu > li > a {
color: #9d9d9d;
}
.navbar-inverse .navbar-nav .open .dropdown-menu > li > a:hover,
.navbar-inverse .navbar-nav .open .dropdown-menu > li > a:focus {
color: #fff;
background-color: transparent;
}
.navbar-inverse .navbar-nav .open .dropdown-menu > .active > a,
.navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:hover,
.navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:focus {
color: #fff;
background-color: #080808;
}
.navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a,
.navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:hover,
.navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:focus {
color: #444;
background-color: transparent;
}
}
.navbar-inverse .navbar-link {
color: #9d9d9d;
}
.navbar-inverse .navbar-link:hover {
color: #fff;
}
.navbar-inverse .btn-link {
color: #9d9d9d;
}
.navbar-inverse .btn-link:hover,
.navbar-inverse .btn-link:focus {
color: #fff;
}
.navbar-inverse .btn-link[disabled]:hover,
fieldset[disabled] .navbar-inverse .btn-link:hover,
.navbar-inverse .btn-link[disabled]:focus,
fieldset[disabled] .navbar-inverse .btn-link:focus {
color: #444;
}
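/* Breadcrumbs */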
.breadcrumb {
padding: 8px 15px;
margin-bottom: 18px;
list-style: none;
background-color: #f5f5f5;
border-radius: 2px;
}
.breadcrumb > li {
display: inline-block;
}
.breadcrumb > li + li:before {
content: "/\00a0";
padding: 0 5px;
color: #5e5e5e;
}
.breadcrumb > .active {
color: #777777;
}
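/* Pagination */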
.pagination {
display: inline-block;
padding-left: 0;
margin: 18px 0;
border-radius: 2px;
}
.pagination > li {
display: inline;
}
.pagination > li > a,
.pagination > li > span {
position: relative;
float: left;
padding: 6px 12px;
line-height: 1.42857143;
text-decoration: none;
color: #337ab7;
background-color: #fff;
border: 1px solid #ddd;
margin-left: -1px;
}
.pagination > li:first-child > a,
.pagination > li:first-child > span {
margin-left: 0;
border-bottom-left-radius: 2px;
border-top-left-radius: 2px;
}
.pagination > li:last-child > a,
.pagination > li:last-child > span {
border-bottom-right-radius: 2px;
border-top-right-radius: 2px;
}
.pagination > li > a:hover,
.pagination > li > span:hover,
.pagination > li > a:focus,
.pagination > li > span:focus {
z-index: 2;
color: #23527c;
background-color: #eeeeee;
border-color: #ddd;
}
.pagination > .active > a,
.pagination > .active > span,
.pagination > .active > a:hover,
.pagination > .active > span:hover,
.pagination > .active > a:focus,
.pagination > .active > span:focus {
z-index: 3;
color: #fff;
background-color: #337ab7;
border-color: #337ab7;
cursor: default;
}
.pagination > .disabled > span,
.pagination > .disabled > span:hover,
.pagination > .disabled > span:focus,
.pagination > .disabled > a,
.pagination > .disabled > a:hover,
.pagination > .disabled > a:focus {
color: #777777;
background-color: #fff;
border-color: #ddd;
cursor: not-allowed;
}
.pagination-lg > li > a,
.pagination-lg > li > span {
padding: 10px 16px;
font-size: 17px;
line-height: 1.3333333;
}
.pagination-lg > li:first-child > a,
.pagination-lg > li:first-child > span {
border-bottom-left-radius: 3px;
border-top-left-radius: 3px;
}
.pagination-lg > li:last-child > a,
.pagination-lg > li:last-child > span {
border-bottom-right-radius: 3px;
border-top-right-radius: 3px;
}
.pagination-sm > li > a,
.pagination-sm > li > span {
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
}
.pagination-sm > li:first-child > a,
.pagination-sm > li:first-child > span {
border-bottom-left-radius: 1px;
border-top-left-radius: 1px;
}
.pagination-sm > li:last-child > a,
.pagination-sm > li:last-child > span {
border-bottom-right-radius: 1px;
border-top-right-radius: 1px;
}
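/* Pager (previous/next links) */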
.pager {
padding-left: 0;
margin: 18px 0;
list-style: none;
text-align: center;
}
.pager li {
display: inline;
}
.pager li > a,
.pager li > span {
display: inline-block;
padding: 5px 14px;
background-color: #fff;
border: 1px solid #ddd;
border-radius: 15px;
}
.pager li > a:hover,
.pager li > a:focus {
text-decoration: none;
background-color: #eeeeee;
}
.pager .next > a,
.pager .next > span {
float: right;
}
.pager .previous > a,
.pager .previous > span {
float: left;
}
.pager .disabled > a,
.pager .disabled > a:hover,
.pager .disabled > a:focus,
.pager .disabled > span {
color: #777777;
background-color: #fff;
cursor: not-allowed;
}
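/* Labels */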
.label {
display: inline;
padding: .2em .6em .3em;
font-size: 75%;
font-weight: bold;
line-height: 1;
color: #fff;
text-align: center;
white-space: nowrap;
vertical-align: baseline;
border-radius: .25em;
}
a.label:hover,
a.label:focus {
color: #fff;
text-decoration: none;
cursor: pointer;
}
.label:empty {
display: none;
}
.btn .label {
position: relative;
top: -1px;
}
.label-default {
background-color: #777777;
}
.label-default[href]:hover,
.label-default[href]:focus {
background-color: #5e5e5e;
}
.label-primary {
background-color: #337ab7;
}
.label-primary[href]:hover,
.label-primary[href]:focus {
background-color: #286090;
}
.label-success {
background-color: #5cb85c;
}
.label-success[href]:hover,
.label-success[href]:focus {
background-color: #449d44;
}
.label-info {
background-color: #5bc0de;
}
.label-info[href]:hover,
.label-info[href]:focus {
background-color: #31b0d5;
}
.label-warning {
background-color: #f0ad4e;
}
.label-warning[href]:hover,
.label-warning[href]:focus {
background-color: #ec971f;
}
.label-danger {
background-color: #d9534f;
}
.label-danger[href]:hover,
.label-danger[href]:focus {
background-color: #c9302c;
}
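/* Badges */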
.badge {
display: inline-block;
min-width: 10px;
padding: 3px 7px;
font-size: 12px;
font-weight: bold;
color: #fff;
line-height: 1;
vertical-align: middle;
white-space: nowrap;
text-align: center;
background-color: #777777;
border-radius: 10px;
}
.badge:empty {
display: none;
}
.btn .badge {
position: relative;
top: -1px;
}
.btn-xs .badge,
.btn-group-xs > .btn .badge {
top: 0;
padding: 1px 5px;
}
a.badge:hover,
a.badge:focus {
color: #fff;
text-decoration: none;
cursor: pointer;
}
.list-group-item.active > .badge,
.nav-pills > .active > a > .badge {
color: #337ab7;
background-color: #fff;
}
.list-group-item > .badge {
float: right;
}
.list-group-item > .badge + .badge {
margin-right: 5px;
}
.nav-pills > li > a > .badge {
margin-left: 3px;
}
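/* Jumbotron */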
.jumbotron {
padding-top: 30px;
padding-bottom: 30px;
margin-bottom: 30px;
color: inherit;
background-color: #eeeeee;
}
.jumbotron h1,
.jumbotron .h1 {
color: inherit;
}
.jumbotron p {
margin-bottom: 15px;
font-size: 20px;
font-weight: 200;
}
.jumbotron > hr {
border-top-color: #d5d5d5;
}
.container .jumbotron,
.container-fluid .jumbotron {
border-radius: 3px;
padding-left: 0px;
padding-right: 0px;
}
.jumbotron .container {
max-width: 100%;
}
@media screen and (min-width: 768px) {
.jumbotron {
padding-top: 48px;
padding-bottom: 48px;
}
.container .jumbotron,
.container-fluid .jumbotron {
padding-left: 60px;
padding-right: 60px;
}
.jumbotron h1,
.jumbotron .h1 {
font-size: 59px;
}
}
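/* Thumbnails */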
.thumbnail {
display: block;
padding: 4px;
margin-bottom: 18px;
line-height: 1.42857143;
background-color: #fff;
border: 1px solid #ddd;
border-radius: 2px;
-webkit-transition: border 0.2s ease-in-out;
-o-transition: border 0.2s ease-in-out;
transition: border 0.2s ease-in-out;
}
.thumbnail > img,
.thumbnail a > img {
margin-left: auto;
margin-right: auto;
}
a.thumbnail:hover,
a.thumbnail:focus,
a.thumbnail.active {
border-color: #337ab7;
}
.thumbnail .caption {
padding: 9px;
color: #000;
}
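/* Alerts */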
.alert {
padding: 15px;
margin-bottom: 18px;
border: 1px solid transparent;
border-radius: 2px;
}
.alert h4 {
margin-top: 0;
color: inherit;
}
.alert .alert-link {
font-weight: bold;
}
.alert > p,
.alert > ul {
margin-bottom: 0;
}
.alert > p + p {
margin-top: 5px;
}
.alert-dismissable,
.alert-dismissible {
padding-right: 35px;
}
.alert-dismissable .close,
.alert-dismissible .close {
position: relative;
top: -2px;
right: -21px;
color: inherit;
}
.alert-success {
background-color: #dff0d8;
border-color: #d6e9c6;
color: #3c763d;
}
.alert-success hr {
border-top-color: #c9e2b3;
}
.alert-success .alert-link {
color: #2b542c;
}
.alert-info {
background-color: #d9edf7;
border-color: #bce8f1;
color: #31708f;
}
.alert-info hr {
border-top-color: #a6e1ec;
}
.alert-info .alert-link {
color: #245269;
}
.alert-warning {
background-color: #fcf8e3;
border-color: #faebcc;
color: #8a6d3b;
}
.alert-warning hr {
border-top-color: #f7e1b5;
}
.alert-warning .alert-link {
color: #66512c;
}
.alert-danger {
background-color: #f2dede;
border-color: #ebccd1;
color: #a94442;
}
.alert-danger hr {
border-top-color: #e4b9c0;
}
.alert-danger .alert-link {
color: #843534;
}
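/* Progress bars: the stripe animation shifts the 40px background tile
   one full period per cycle. */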
@-webkit-keyframes progress-bar-stripes {
from {
background-position: 40px 0;
}
to {
background-position: 0 0;
}
}
@keyframes progress-bar-stripes {
from {
background-position: 40px 0;
}
to {
background-position: 0 0;
}
}
.progress {
overflow: hidden;
height: 18px;
margin-bottom: 18px;
background-color: #f5f5f5;
border-radius: 2px;
-webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1);
box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1);
}
.progress-bar {
float: left;
width: 0%;
height: 100%;
font-size: 12px;
line-height: 18px;
color: #fff;
text-align: center;
background-color: #337ab7;
-webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15);
box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15);
-webkit-transition: width 0.6s ease;
-o-transition: width 0.6s ease;
transition: width 0.6s ease;
}
.progress-striped .progress-bar,
.progress-bar-striped {
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-size: 40px 40px;
}
.progress.active .progress-bar,
.progress-bar.active {
-webkit-animation: progress-bar-stripes 2s linear infinite;
-o-animation: progress-bar-stripes 2s linear infinite;
animation: progress-bar-stripes 2s linear infinite;
}
.progress-bar-success {
background-color: #5cb85c;
}
.progress-striped .progress-bar-success {
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
}
.progress-bar-info {
background-color: #5bc0de;
}
.progress-striped .progress-bar-info {
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
}
.progress-bar-warning {
background-color: #f0ad4e;
}
.progress-striped .progress-bar-warning {
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
}
.progress-bar-danger {
background-color: #d9534f;
}
.progress-striped .progress-bar-danger {
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);
}
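/* Media object; zoom: 1 triggers hasLayout for old IE. */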
.media {
margin-top: 15px;
}
.media:first-child {
margin-top: 0;
}
.media,
.media-body {
zoom: 1;
overflow: hidden;
}
.media-body {
width: 10000px;
}
.media-object {
display: block;
}
.media-object.img-thumbnail {
max-width: none;
}
.media-right,
.media > .pull-right {
padding-left: 10px;
}
.media-left,
.media > .pull-left {
padding-right: 10px;
}
.media-left,
.media-right,
.media-body {
display: table-cell;
vertical-align: top;
}
.media-middle {
vertical-align: middle;
}
.media-bottom {
vertical-align: bottom;
}
.media-heading {
margin-top: 0;
margin-bottom: 5px;
}
.media-list {
padding-left: 0;
list-style: none;
}
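/* List groups */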
.list-group {
margin-bottom: 20px;
padding-left: 0;
}
.list-group-item {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #fff;
border: 1px solid #ddd;
}
.list-group-item:first-child {
border-top-right-radius: 2px;
border-top-left-radius: 2px;
}
.list-group-item:last-child {
margin-bottom: 0;
border-bottom-right-radius: 2px;
border-bottom-left-radius: 2px;
}
a.list-group-item,
button.list-group-item {
color: #555;
}
a.list-group-item .list-group-item-heading,
button.list-group-item .list-group-item-heading {
color: #333;
}
a.list-group-item:hover,
button.list-group-item:hover,
a.list-group-item:focus,
button.list-group-item:focus {
text-decoration: none;
color: #555;
background-color: #f5f5f5;
}
button.list-group-item {
width: 100%;
text-align: left;
}
.list-group-item.disabled,
.list-group-item.disabled:hover,
.list-group-item.disabled:focus {
background-color: #eeeeee;
color: #777777;
cursor: not-allowed;
}
.list-group-item.disabled .list-group-item-heading,
.list-group-item.disabled:hover .list-group-item-heading,
.list-group-item.disabled:focus .list-group-item-heading {
color: inherit;
}
.list-group-item.disabled .list-group-item-text,
.list-group-item.disabled:hover .list-group-item-text,
.list-group-item.disabled:focus .list-group-item-text {
color: #777777;
}
.list-group-item.active,
.list-group-item.active:hover,
.list-group-item.active:focus {
z-index: 2;
color: #fff;
background-color: #337ab7;
border-color: #337ab7;
}
.list-group-item.active .list-group-item-heading,
.list-group-item.active:hover .list-group-item-heading,
.list-group-item.active:focus .list-group-item-heading,
.list-group-item.active .list-group-item-heading > small,
.list-group-item.active:hover .list-group-item-heading > small,
.list-group-item.active:focus .list-group-item-heading > small,
.list-group-item.active .list-group-item-heading > .small,
.list-group-item.active:hover .list-group-item-heading > .small,
.list-group-item.active:focus .list-group-item-heading > .small {
color: inherit;
}
.list-group-item.active .list-group-item-text,
.list-group-item.active:hover .list-group-item-text,
.list-group-item.active:focus .list-group-item-text {
color: #c7ddef;
}
.list-group-item-success {
color: #3c763d;
background-color: #dff0d8;
}
a.list-group-item-success,
button.list-group-item-success {
color: #3c763d;
}
a.list-group-item-success .list-group-item-heading,
button.list-group-item-success .list-group-item-heading {
color: inherit;
}
a.list-group-item-success:hover,
button.list-group-item-success:hover,
a.list-group-item-success:focus,
button.list-group-item-success:focus {
color: #3c763d;
background-color: #d0e9c6;
}
a.list-group-item-success.active,
button.list-group-item-success.active,
a.list-group-item-success.active:hover,
button.list-group-item-success.active:hover,
a.list-group-item-success.active:focus,
button.list-group-item-success.active:focus {
color: #fff;
background-color: #3c763d;
border-color: #3c763d;
}
.list-group-item-info {
color: #31708f;
background-color: #d9edf7;
}
a.list-group-item-info,
button.list-group-item-info {
color: #31708f;
}
a.list-group-item-info .list-group-item-heading,
button.list-group-item-info .list-group-item-heading {
color: inherit;
}
a.list-group-item-info:hover,
button.list-group-item-info:hover,
a.list-group-item-info:focus,
button.list-group-item-info:focus {
color: #31708f;
background-color: #c4e3f3;
}
a.list-group-item-info.active,
button.list-group-item-info.active,
a.list-group-item-info.active:hover,
button.list-group-item-info.active:hover,
a.list-group-item-info.active:focus,
button.list-group-item-info.active:focus {
color: #fff;
background-color: #31708f;
border-color: #31708f;
}
.list-group-item-warning {
color: #8a6d3b;
background-color: #fcf8e3;
}
a.list-group-item-warning,
button.list-group-item-warning {
color: #8a6d3b;
}
a.list-group-item-warning .list-group-item-heading,
button.list-group-item-warning .list-group-item-heading {
color: inherit;
}
a.list-group-item-warning:hover,
button.list-group-item-warning:hover,
a.list-group-item-warning:focus,
button.list-group-item-warning:focus {
color: #8a6d3b;
background-color: #faf2cc;
}
a.list-group-item-warning.active,
button.list-group-item-warning.active,
a.list-group-item-warning.active:hover,
button.list-group-item-warning.active:hover,
a.list-group-item-warning.active:focus,
button.list-group-item-warning.active:focus {
color: #fff;
background-color: #8a6d3b;
border-color: #8a6d3b;
}
.list-group-item-danger {
color: #a94442;
background-color: #f2dede;
}
a.list-group-item-danger,
button.list-group-item-danger {
color: #a94442;
}
a.list-group-item-danger .list-group-item-heading,
button.list-group-item-danger .list-group-item-heading {
color: inherit;
}
a.list-group-item-danger:hover,
button.list-group-item-danger:hover,
a.list-group-item-danger:focus,
button.list-group-item-danger:focus {
color: #a94442;
background-color: #ebcccc;
}
a.list-group-item-danger.active,
button.list-group-item-danger.active,
a.list-group-item-danger.active:hover,
button.list-group-item-danger.active:hover,
a.list-group-item-danger.active:focus,
button.list-group-item-danger.active:focus {
color: #fff;
background-color: #a94442;
border-color: #a94442;
}
.list-group-item-heading {
margin-top: 0;
margin-bottom: 5px;
}
.list-group-item-text {
margin-bottom: 0;
line-height: 1.3;
}
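/* Panels: contextual variants recolor the heading, border, and badge. */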
.panel {
margin-bottom: 18px;
background-color: #fff;
border: 1px solid transparent;
border-radius: 2px;
-webkit-box-shadow: 0 1px 1px rgba(0, 0, 0, 0.05);
box-shadow: 0 1px 1px rgba(0, 0, 0, 0.05);
}
.panel-body {
padding: 15px;
}
.panel-heading {
padding: 10px 15px;
border-bottom: 1px solid transparent;
border-top-right-radius: 1px;
border-top-left-radius: 1px;
}
.panel-heading > .dropdown .dropdown-toggle {
color: inherit;
}
.panel-title {
margin-top: 0;
margin-bottom: 0;
font-size: 15px;
color: inherit;
}
.panel-title > a,
.panel-title > small,
.panel-title > .small,
.panel-title > small > a,
.panel-title > .small > a {
color: inherit;
}
.panel-footer {
padding: 10px 15px;
background-color: #f5f5f5;
border-top: 1px solid #ddd;
border-bottom-right-radius: 1px;
border-bottom-left-radius: 1px;
}
.panel > .list-group,
.panel > .panel-collapse > .list-group {
margin-bottom: 0;
}
.panel > .list-group .list-group-item,
.panel > .panel-collapse > .list-group .list-group-item {
border-width: 1px 0;
border-radius: 0;
}
.panel > .list-group:first-child .list-group-item:first-child,
.panel > .panel-collapse > .list-group:first-child .list-group-item:first-child {
border-top: 0;
border-top-right-radius: 1px;
border-top-left-radius: 1px;
}
.panel > .list-group:last-child .list-group-item:last-child,
.panel > .panel-collapse > .list-group:last-child .list-group-item:last-child {
border-bottom: 0;
border-bottom-right-radius: 1px;
border-bottom-left-radius: 1px;
}
.panel > .panel-heading + .panel-collapse > .list-group .list-group-item:first-child {
border-top-right-radius: 0;
border-top-left-radius: 0;
}
.panel-heading + .list-group .list-group-item:first-child {
border-top-width: 0;
}
.list-group + .panel-footer {
border-top-width: 0;
}
.panel > .table,
.panel > .table-responsive > .table,
.panel > .panel-collapse > .table {
margin-bottom: 0;
}
.panel > .table caption,
.panel > .table-responsive > .table caption,
.panel > .panel-collapse > .table caption {
padding-left: 15px;
padding-right: 15px;
}
.panel > .table:first-child,
.panel > .table-responsive:first-child > .table:first-child {
border-top-right-radius: 1px;
border-top-left-radius: 1px;
}
.panel > .table:first-child > thead:first-child > tr:first-child,
.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child,
.panel > .table:first-child > tbody:first-child > tr:first-child,
.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child {
border-top-left-radius: 1px;
border-top-right-radius: 1px;
}
.panel > .table:first-child > thead:first-child > tr:first-child td:first-child,
.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child td:first-child,
.panel > .table:first-child > tbody:first-child > tr:first-child td:first-child,
.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child td:first-child,
.panel > .table:first-child > thead:first-child > tr:first-child th:first-child,
.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child th:first-child,
.panel > .table:first-child > tbody:first-child > tr:first-child th:first-child,
.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child th:first-child {
border-top-left-radius: 1px;
}
.panel > .table:first-child > thead:first-child > tr:first-child td:last-child,
.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child td:last-child,
.panel > .table:first-child > tbody:first-child > tr:first-child td:last-child,
.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child td:last-child,
.panel > .table:first-child > thead:first-child > tr:first-child th:last-child,
.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child th:last-child,
.panel > .table:first-child > tbody:first-child > tr:first-child th:last-child,
.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child th:last-child {
border-top-right-radius: 1px;
}
.panel > .table:last-child,
.panel > .table-responsive:last-child > .table:last-child {
border-bottom-right-radius: 1px;
border-bottom-left-radius: 1px;
}
.panel > .table:last-child > tbody:last-child > tr:last-child,
.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child,
.panel > .table:last-child > tfoot:last-child > tr:last-child,
.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child {
border-bottom-left-radius: 1px;
border-bottom-right-radius: 1px;
}
.panel > .table:last-child > tbody:last-child > tr:last-child td:first-child,
.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child td:first-child,
.panel > .table:last-child > tfoot:last-child > tr:last-child td:first-child,
.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child td:first-child,
.panel > .table:last-child > tbody:last-child > tr:last-child th:first-child,
.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child th:first-child,
.panel > .table:last-child > tfoot:last-child > tr:last-child th:first-child,
.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child th:first-child {
border-bottom-left-radius: 1px;
}
.panel > .table:last-child > tbody:last-child > tr:last-child td:last-child,
.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child td:last-child,
.panel > .table:last-child > tfoot:last-child > tr:last-child td:last-child,
.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child td:last-child,
.panel > .table:last-child > tbody:last-child > tr:last-child th:last-child,
.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child th:last-child,
.panel > .table:last-child > tfoot:last-child > tr:last-child th:last-child,
.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child th:last-child {
border-bottom-right-radius: 1px;
}
.panel > .panel-body + .table,
.panel > .panel-body + .table-responsive,
.panel > .table + .panel-body,
.panel > .table-responsive + .panel-body {
border-top: 1px solid #ddd;
}
.panel > .table > tbody:first-child > tr:first-child th,
.panel > .table > tbody:first-child > tr:first-child td {
border-top: 0;
}
.panel > .table-bordered,
.panel > .table-responsive > .table-bordered {
border: 0;
}
.panel > .table-bordered > thead > tr > th:first-child,
.panel > .table-responsive > .table-bordered > thead > tr > th:first-child,
.panel > .table-bordered > tbody > tr > th:first-child,
.panel > .table-responsive > .table-bordered > tbody > tr > th:first-child,
.panel > .table-bordered > tfoot > tr > th:first-child,
.panel > .table-responsive > .table-bordered > tfoot > tr > th:first-child,
.panel > .table-bordered > thead > tr > td:first-child,
.panel > .table-responsive > .table-bordered > thead > tr > td:first-child,
.panel > .table-bordered > tbody > tr > td:first-child,
.panel > .table-responsive > .table-bordered > tbody > tr > td:first-child,
.panel > .table-bordered > tfoot > tr > td:first-child,
.panel > .table-responsive > .table-bordered > tfoot > tr > td:first-child {
border-left: 0;
}
.panel > .table-bordered > thead > tr > th:last-child,
.panel > .table-responsive > .table-bordered > thead > tr > th:last-child,
.panel > .table-bordered > tbody > tr > th:last-child,
.panel > .table-responsive > .table-bordered > tbody > tr > th:last-child,
.panel > .table-bordered > tfoot > tr > th:last-child,
.panel > .table-responsive > .table-bordered > tfoot > tr > th:last-child,
.panel > .table-bordered > thead > tr > td:last-child,
.panel > .table-responsive > .table-bordered > thead > tr > td:last-child,
.panel > .table-bordered > tbody > tr > td:last-child,
.panel > .table-responsive > .table-bordered > tbody > tr > td:last-child,
.panel > .table-bordered > tfoot > tr > td:last-child,
.panel > .table-responsive > .table-bordered > tfoot > tr > td:last-child {
border-right: 0;
}
.panel > .table-bordered > thead > tr:first-child > td,
.panel > .table-responsive > .table-bordered > thead > tr:first-child > td,
.panel > .table-bordered > tbody > tr:first-child > td,
.panel > .table-responsive > .table-bordered > tbody > tr:first-child > td,
.panel > .table-bordered > thead > tr:first-child > th,
.panel > .table-responsive > .table-bordered > thead > tr:first-child > th,
.panel > .table-bordered > tbody > tr:first-child > th,
.panel > .table-responsive > .table-bordered > tbody > tr:first-child > th {
border-bottom: 0;
}
.panel > .table-bordered > tbody > tr:last-child > td,
.panel > .table-responsive > .table-bordered > tbody > tr:last-child > td,
.panel > .table-bordered > tfoot > tr:last-child > td,
.panel > .table-responsive > .table-bordered > tfoot > tr:last-child > td,
.panel > .table-bordered > tbody > tr:last-child > th,
.panel > .table-responsive > .table-bordered > tbody > tr:last-child > th,
.panel > .table-bordered > tfoot > tr:last-child > th,
.panel > .table-responsive > .table-bordered > tfoot > tr:last-child > th {
border-bottom: 0;
}
.panel > .table-responsive {
border: 0;
margin-bottom: 0;
}
.panel-group {
margin-bottom: 18px;
}
.panel-group .panel {
margin-bottom: 0;
border-radius: 2px;
}
.panel-group .panel + .panel {
margin-top: 5px;
}
.panel-group .panel-heading {
border-bottom: 0;
}
.panel-group .panel-heading + .panel-collapse > .panel-body,
.panel-group .panel-heading + .panel-collapse > .list-group {
border-top: 1px solid #ddd;
}
.panel-group .panel-footer {
border-top: 0;
}
.panel-group .panel-footer + .panel-collapse .panel-body {
border-bottom: 1px solid #ddd;
}
.panel-default {
border-color: #ddd;
}
.panel-default > .panel-heading {
color: #333333;
background-color: #f5f5f5;
border-color: #ddd;
}
.panel-default > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #ddd;
}
.panel-default > .panel-heading .badge {
color: #f5f5f5;
background-color: #333333;
}
.panel-default > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #ddd;
}
.panel-primary {
border-color: #337ab7;
}
.panel-primary > .panel-heading {
color: #fff;
background-color: #337ab7;
border-color: #337ab7;
}
.panel-primary > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #337ab7;
}
.panel-primary > .panel-heading .badge {
color: #337ab7;
background-color: #fff;
}
.panel-primary > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #337ab7;
}
.panel-success {
border-color: #d6e9c6;
}
.panel-success > .panel-heading {
color: #3c763d;
background-color: #dff0d8;
border-color: #d6e9c6;
}
.panel-success > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #d6e9c6;
}
.panel-success > .panel-heading .badge {
color: #dff0d8;
background-color: #3c763d;
}
.panel-success > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #d6e9c6;
}
.panel-info {
border-color: #bce8f1;
}
.panel-info > .panel-heading {
color: #31708f;
background-color: #d9edf7;
border-color: #bce8f1;
}
.panel-info > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #bce8f1;
}
.panel-info > .panel-heading .badge {
color: #d9edf7;
background-color: #31708f;
}
.panel-info > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #bce8f1;
}
.panel-warning {
border-color: #faebcc;
}
.panel-warning > .panel-heading {
color: #8a6d3b;
background-color: #fcf8e3;
border-color: #faebcc;
}
.panel-warning > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #faebcc;
}
.panel-warning > .panel-heading .badge {
color: #fcf8e3;
background-color: #8a6d3b;
}
.panel-warning > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #faebcc;
}
.panel-danger {
border-color: #ebccd1;
}
.panel-danger > .panel-heading {
color: #a94442;
background-color: #f2dede;
border-color: #ebccd1;
}
.panel-danger > .panel-heading + .panel-collapse > .panel-body {
border-top-color: #ebccd1;
}
.panel-danger > .panel-heading .badge {
color: #f2dede;
background-color: #a94442;
}
.panel-danger > .panel-footer + .panel-collapse > .panel-body {
border-bottom-color: #ebccd1;
}
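/* Responsive embeds: padding-bottom sets the aspect ratio
   (56.25% = 16:9, 75% = 4:3). */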
.embed-responsive {
position: relative;
display: block;
height: 0;
padding: 0;
overflow: hidden;
}
.embed-responsive .embed-responsive-item,
.embed-responsive iframe,
.embed-responsive embed,
.embed-responsive object,
.embed-responsive video {
position: absolute;
top: 0;
left: 0;
bottom: 0;
height: 100%;
width: 100%;
border: 0;
}
.embed-responsive-16by9 {
padding-bottom: 56.25%;
}
.embed-responsive-4by3 {
padding-bottom: 75%;
}
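/* Wells */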
.well {
min-height: 20px;
padding: 19px;
margin-bottom: 20px;
background-color: #f5f5f5;
border: 1px solid #e3e3e3;
border-radius: 2px;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05);
}
.well blockquote {
border-color: #ddd;
border-color: rgba(0, 0, 0, 0.15);
}
.well-lg {
padding: 24px;
border-radius: 3px;
}
.well-sm {
padding: 9px;
border-radius: 1px;
}
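/* Close icon (used by alerts and modals) */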
.close {
float: right;
font-size: 19.5px;
font-weight: bold;
line-height: 1;
color: #000;
text-shadow: 0 1px 0 #fff;
opacity: 0.2;
filter: alpha(opacity=20);
}
.close:hover,
.close:focus {
color: #000;
text-decoration: none;
cursor: pointer;
opacity: 0.5;
filter: alpha(opacity=50);
}
button.close {
padding: 0;
cursor: pointer;
background: transparent;
border: 0;
-webkit-appearance: none;
}
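/* Modals */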
.modal-open {
overflow: hidden;
}
.modal {
display: none;
overflow: hidden;
position: fixed;
top: 0;
right: 0;
bottom: 0;
left: 0;
z-index: 1050;
-webkit-overflow-scrolling: touch;
outline: 0;
}
.modal.fade .modal-dialog {
-webkit-transform: translate(0, -25%);
-ms-transform: translate(0, -25%);
-o-transform: translate(0, -25%);
transform: translate(0, -25%);
-webkit-transition: -webkit-transform 0.3s ease-out;
-moz-transition: -moz-transform 0.3s ease-out;
-o-transition: -o-transform 0.3s ease-out;
transition: transform 0.3s ease-out;
}
.modal.in .modal-dialog {
-webkit-transform: translate(0, 0);
-ms-transform: translate(0, 0);
-o-transform: translate(0, 0);
transform: translate(0, 0);
}
.modal-open .modal {
overflow-x: hidden;
overflow-y: auto;
}
.modal-dialog {
position: relative;
width: auto;
margin: 10px;
}
.modal-content {
position: relative;
background-color: #fff;
border: 1px solid #999;
border: 1px solid rgba(0, 0, 0, 0.2);
border-radius: 3px;
-webkit-box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5);
box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5);
background-clip: padding-box;
outline: 0;
}
.modal-backdrop {
position: fixed;
top: 0;
right: 0;
bottom: 0;
left: 0;
z-index: 1040;
background-color: #000;
}
.modal-backdrop.fade {
opacity: 0;
filter: alpha(opacity=0);
}
.modal-backdrop.in {
opacity: 0.5;
filter: alpha(opacity=50);
}
.modal-header {
padding: 15px;
border-bottom: 1px solid #e5e5e5;
}
.modal-header .close {
margin-top: -2px;
}
.modal-title {
margin: 0;
line-height: 1.42857143;
}
.modal-body {
position: relative;
padding: 15px;
}
.modal-footer {
padding: 15px;
text-align: right;
border-top: 1px solid #e5e5e5;
}
.modal-footer .btn + .btn {
margin-left: 5px;
margin-bottom: 0;
}
.modal-footer .btn-group .btn + .btn {
margin-left: -1px;
}
.modal-footer .btn-block + .btn-block {
margin-left: 0;
}
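/* Off-screen helper element used by the modal script to measure
   the scrollbar width. */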
.modal-scrollbar-measure {
position: absolute;
top: -9999px;
width: 50px;
height: 50px;
overflow: scroll;
}
@media (min-width: 768px) {
.modal-dialog {
width: 600px;
margin: 30px auto;
}
.modal-content {
-webkit-box-shadow: 0 5px 15px rgba(0, 0, 0, 0.5);
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.5);
}
.modal-sm {
width: 300px;
}
}
@media (min-width: 992px) {
.modal-lg {
width: 900px;
}
}
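/* Tooltips */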
.tooltip {
position: absolute;
z-index: 1070;
display: block;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-style: normal;
font-weight: normal;
letter-spacing: normal;
line-break: auto;
line-height: 1.42857143;
text-align: left;
text-align: start;
text-decoration: none;
text-shadow: none;
text-transform: none;
white-space: normal;
word-break: normal;
word-spacing: normal;
word-wrap: normal;
font-size: 12px;
opacity: 0;
filter: alpha(opacity=0);
}
.tooltip.in {
opacity: 0.9;
filter: alpha(opacity=90);
}
.tooltip.top {
margin-top: -3px;
padding: 5px 0;
}
.tooltip.right {
margin-left: 3px;
padding: 0 5px;
}
.tooltip.bottom {
margin-top: 3px;
padding: 5px 0;
}
.tooltip.left {
margin-left: -3px;
padding: 0 5px;
}
.tooltip-inner {
max-width: 200px;
padding: 3px 8px;
color: #fff;
text-align: center;
background-color: #000;
border-radius: 2px;
}
.tooltip-arrow {
position: absolute;
width: 0;
height: 0;
border-color: transparent;
border-style: solid;
}
.tooltip.top .tooltip-arrow {
bottom: 0;
left: 50%;
margin-left: -5px;
border-width: 5px 5px 0;
border-top-color: #000;
}
.tooltip.top-left .tooltip-arrow {
bottom: 0;
right: 5px;
margin-bottom: -5px;
border-width: 5px 5px 0;
border-top-color: #000;
}
.tooltip.top-right .tooltip-arrow {
bottom: 0;
left: 5px;
margin-bottom: -5px;
border-width: 5px 5px 0;
border-top-color: #000;
}
.tooltip.right .tooltip-arrow {
top: 50%;
left: 0;
margin-top: -5px;
border-width: 5px 5px 5px 0;
border-right-color: #000;
}
.tooltip.left .tooltip-arrow {
top: 50%;
right: 0;
margin-top: -5px;
border-width: 5px 0 5px 5px;
border-left-color: #000;
}
.tooltip.bottom .tooltip-arrow {
top: 0;
left: 50%;
margin-left: -5px;
border-width: 0 5px 5px;
border-bottom-color: #000;
}
.tooltip.bottom-left .tooltip-arrow {
top: 0;
right: 5px;
margin-top: -5px;
border-width: 0 5px 5px;
border-bottom-color: #000;
}
.tooltip.bottom-right .tooltip-arrow {
top: 0;
left: 5px;
margin-top: -5px;
border-width: 0 5px 5px;
border-bottom-color: #000;
}
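/* Popovers: .arrow draws the border triangle, :after draws the fill. */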
.popover {
position: absolute;
top: 0;
left: 0;
z-index: 1060;
display: none;
max-width: 276px;
padding: 1px;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-style: normal;
font-weight: normal;
letter-spacing: normal;
line-break: auto;
line-height: 1.42857143;
text-align: left;
text-align: start;
text-decoration: none;
text-shadow: none;
text-transform: none;
white-space: normal;
word-break: normal;
word-spacing: normal;
word-wrap: normal;
font-size: 13px;
background-color: #fff;
background-clip: padding-box;
border: 1px solid #ccc;
border: 1px solid rgba(0, 0, 0, 0.2);
border-radius: 3px;
-webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
}
.popover.top {
margin-top: -10px;
}
.popover.right {
margin-left: 10px;
}
.popover.bottom {
margin-top: 10px;
}
.popover.left {
margin-left: -10px;
}
.popover-title {
margin: 0;
padding: 8px 14px;
font-size: 13px;
background-color: #f7f7f7;
border-bottom: 1px solid #ebebeb;
border-radius: 2px 2px 0 0;
}
.popover-content {
padding: 9px 14px;
}
.popover > .arrow,
.popover > .arrow:after {
position: absolute;
display: block;
width: 0;
height: 0;
border-color: transparent;
border-style: solid;
}
.popover > .arrow {
border-width: 11px;
}
.popover > .arrow:after {
border-width: 10px;
content: "";
}
.popover.top > .arrow {
left: 50%;
margin-left: -11px;
border-bottom-width: 0;
border-top-color: #999999;
border-top-color: rgba(0, 0, 0, 0.25);
bottom: -11px;
}
.popover.top > .arrow:after {
content: " ";
bottom: 1px;
margin-left: -10px;
border-bottom-width: 0;
border-top-color: #fff;
}
.popover.right > .arrow {
top: 50%;
left: -11px;
margin-top: -11px;
border-left-width: 0;
border-right-color: #999999;
border-right-color: rgba(0, 0, 0, 0.25);
}
.popover.right > .arrow:after {
content: " ";
left: 1px;
bottom: -10px;
border-left-width: 0;
border-right-color: #fff;
}
.popover.bottom > .arrow {
left: 50%;
margin-left: -11px;
border-top-width: 0;
border-bottom-color: #999999;
border-bottom-color: rgba(0, 0, 0, 0.25);
top: -11px;
}
.popover.bottom > .arrow:after {
content: " ";
top: 1px;
margin-left: -10px;
border-top-width: 0;
border-bottom-color: #fff;
}
.popover.left > .arrow {
top: 50%;
right: -11px;
margin-top: -11px;
border-right-width: 0;
border-left-color: #999999;
border-left-color: rgba(0, 0, 0, 0.25);
}
.popover.left > .arrow:after {
content: " ";
right: 1px;
border-right-width: 0;
border-left-color: #fff;
bottom: -10px;
}
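/* Carousel: the "\9" indicator background below gives old IE a solid
   background so clicks register despite the transparent rgba() value. */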
.carousel {
position: relative;
}
.carousel-inner {
position: relative;
overflow: hidden;
width: 100%;
}
.carousel-inner > .item {
display: none;
position: relative;
-webkit-transition: 0.6s ease-in-out left;
-o-transition: 0.6s ease-in-out left;
transition: 0.6s ease-in-out left;
}
.carousel-inner > .item > img,
.carousel-inner > .item > a > img {
line-height: 1;
}
@media all and (transform-3d), (-webkit-transform-3d) {
.carousel-inner > .item {
-webkit-transition: -webkit-transform 0.6s ease-in-out;
-moz-transition: -moz-transform 0.6s ease-in-out;
-o-transition: -o-transform 0.6s ease-in-out;
transition: transform 0.6s ease-in-out;
-webkit-backface-visibility: hidden;
-moz-backface-visibility: hidden;
backface-visibility: hidden;
-webkit-perspective: 1000px;
-moz-perspective: 1000px;
perspective: 1000px;
}
.carousel-inner > .item.next,
.carousel-inner > .item.active.right {
-webkit-transform: translate3d(100%, 0, 0);
transform: translate3d(100%, 0, 0);
left: 0;
}
.carousel-inner > .item.prev,
.carousel-inner > .item.active.left {
-webkit-transform: translate3d(-100%, 0, 0);
transform: translate3d(-100%, 0, 0);
left: 0;
}
.carousel-inner > .item.next.left,
.carousel-inner > .item.prev.right,
.carousel-inner > .item.active {
-webkit-transform: translate3d(0, 0, 0);
transform: translate3d(0, 0, 0);
left: 0;
}
}
.carousel-inner > .active,
.carousel-inner > .next,
.carousel-inner > .prev {
display: block;
}
.carousel-inner > .active {
left: 0;
}
.carousel-inner > .next,
.carousel-inner > .prev {
position: absolute;
top: 0;
width: 100%;
}
.carousel-inner > .next {
left: 100%;
}
.carousel-inner > .prev {
left: -100%;
}
.carousel-inner > .next.left,
.carousel-inner > .prev.right {
left: 0;
}
.carousel-inner > .active.left {
left: -100%;
}
.carousel-inner > .active.right {
left: 100%;
}
.carousel-control {
position: absolute;
top: 0;
left: 0;
bottom: 0;
width: 15%;
opacity: 0.5;
filter: alpha(opacity=50);
font-size: 20px;
color: #fff;
text-align: center;
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.6);
background-color: rgba(0, 0, 0, 0);
}
.carousel-control.left {
background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%);
background-image: -o-linear-gradient(left, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%);
background-image: linear-gradient(to right, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);
}
.carousel-control.right {
left: auto;
right: 0;
background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%);
background-image: -o-linear-gradient(left, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%);
background-image: linear-gradient(to right, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);
}
.carousel-control:hover,
.carousel-control:focus {
outline: 0;
color: #fff;
text-decoration: none;
opacity: 0.9;
filter: alpha(opacity=90);
}
.carousel-control .icon-prev,
.carousel-control .icon-next,
.carousel-control .glyphicon-chevron-left,
.carousel-control .glyphicon-chevron-right {
position: absolute;
top: 50%;
margin-top: -10px;
z-index: 5;
display: inline-block;
}
.carousel-control .icon-prev,
.carousel-control .glyphicon-chevron-left {
left: 50%;
margin-left: -10px;
}
.carousel-control .icon-next,
.carousel-control .glyphicon-chevron-right {
right: 50%;
margin-right: -10px;
}
.carousel-control .icon-prev,
.carousel-control .icon-next {
width: 20px;
height: 20px;
line-height: 1;
font-family: serif;
}
.carousel-control .icon-prev:before {
content: '\2039';
}
.carousel-control .icon-next:before {
content: '\203a';
}
.carousel-indicators {
position: absolute;
bottom: 10px;
left: 50%;
z-index: 15;
width: 60%;
margin-left: -30%;
padding-left: 0;
list-style: none;
text-align: center;
}
.carousel-indicators li {
display: inline-block;
width: 10px;
height: 10px;
margin: 1px;
text-indent: -999px;
border: 1px solid #fff;
border-radius: 10px;
cursor: pointer;
background-color: #000 \9;
background-color: rgba(0, 0, 0, 0);
}
.carousel-indicators .active {
margin: 0;
width: 12px;
height: 12px;
background-color: #fff;
}
.carousel-caption {
position: absolute;
left: 15%;
right: 15%;
bottom: 20px;
z-index: 10;
padding-top: 20px;
padding-bottom: 20px;
color: #fff;
text-align: center;
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.6);
}
.carousel-caption .btn {
text-shadow: none;
}
@media screen and (min-width: 768px) {
.carousel-control .glyphicon-chevron-left,
.carousel-control .glyphicon-chevron-right,
.carousel-control .icon-prev,
.carousel-control .icon-next {
width: 30px;
height: 30px;
margin-top: -10px;
font-size: 30px;
}
.carousel-control .glyphicon-chevron-left,
.carousel-control .icon-prev {
margin-left: -10px;
}
.carousel-control .glyphicon-chevron-right,
.carousel-control .icon-next {
margin-right: -10px;
}
.carousel-caption {
left: 20%;
right: 20%;
padding-bottom: 30px;
}
.carousel-indicators {
bottom: 20px;
}
}
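/* Clearfix applied to the framework's container components. */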
.clearfix:before,
.clearfix:after,
.dl-horizontal dd:before,
.dl-horizontal dd:after,
.container:before,
.container:after,
.container-fluid:before,
.container-fluid:after,
.row:before,
.row:after,
.form-horizontal .form-group:before,
.form-horizontal .form-group:after,
.btn-toolbar:before,
.btn-toolbar:after,
.btn-group-vertical > .btn-group:before,
.btn-group-vertical > .btn-group:after,
.nav:before,
.nav:after,
.navbar:before,
.navbar:after,
.navbar-header:before,
.navbar-header:after,
.navbar-collapse:before,
.navbar-collapse:after,
.pager:before,
.pager:after,
.panel-body:before,
.panel-body:after,
.modal-header:before,
.modal-header:after,
.modal-footer:before,
.modal-footer:after,
.item_buttons:before,
.item_buttons:after {
content: " ";
display: table;
}
.clearfix:after,
.dl-horizontal dd:after,
.container:after,
.container-fluid:after,
.row:after,
.form-horizontal .form-group:after,
.btn-toolbar:after,
.btn-group-vertical > .btn-group:after,
.nav:after,
.navbar:after,
.navbar-header:after,
.navbar-collapse:after,
.pager:after,
.panel-body:after,
.modal-header:after,
.modal-footer:after,
.item_buttons:after {
clear: both;
}
.center-block {
display: block;
margin-left: auto;
margin-right: auto;
}
.pull-right {
float: right !important;
}
.pull-left {
float: left !important;
}
.hide {
display: none !important;
}
.show {
display: block !important;
}
.invisible {
visibility: hidden;
}
.text-hide {
font: 0/0 a;
color: transparent;
text-shadow: none;
background-color: transparent;
border: 0;
}
.hidden {
display: none !important;
}
.affix {
position: fixed;
}
@-ms-viewport {
width: device-width;
}
.visible-xs,
.visible-sm,
.visible-md,
.visible-lg {
display: none !important;
}
.visible-xs-block,
.visible-xs-inline,
.visible-xs-inline-block,
.visible-sm-block,
.visible-sm-inline,
.visible-sm-inline-block,
.visible-md-block,
.visible-md-inline,
.visible-md-inline-block,
.visible-lg-block,
.visible-lg-inline,
.visible-lg-inline-block {
display: none !important;
}
@media (max-width: 767px) {
.visible-xs {
display: block !important;
}
table.visible-xs {
display: table !important;
}
tr.visible-xs {
display: table-row !important;
}
th.visible-xs,
td.visible-xs {
display: table-cell !important;
}
}
@media (max-width: 767px) {
.visible-xs-block {
display: block !important;
}
}
@media (max-width: 767px) {
.visible-xs-inline {
display: inline !important;
}
}
@media (max-width: 767px) {
.visible-xs-inline-block {
display: inline-block !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
.visible-sm {
display: block !important;
}
table.visible-sm {
display: table !important;
}
tr.visible-sm {
display: table-row !important;
}
th.visible-sm,
td.visible-sm {
display: table-cell !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
.visible-sm-block {
display: block !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
.visible-sm-inline {
display: inline !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
.visible-sm-inline-block {
display: inline-block !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
.visible-md {
display: block !important;
}
table.visible-md {
display: table !important;
}
tr.visible-md {
display: table-row !important;
}
th.visible-md,
td.visible-md {
display: table-cell !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
.visible-md-block {
display: block !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
.visible-md-inline {
display: inline !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
.visible-md-inline-block {
display: inline-block !important;
}
}
@media (min-width: 1200px) {
.visible-lg {
display: block !important;
}
table.visible-lg {
display: table !important;
}
tr.visible-lg {
display: table-row !important;
}
th.visible-lg,
td.visible-lg {
display: table-cell !important;
}
}
@media (min-width: 1200px) {
.visible-lg-block {
display: block !important;
}
}
@media (min-width: 1200px) {
.visible-lg-inline {
display: inline !important;
}
}
@media (min-width: 1200px) {
.visible-lg-inline-block {
display: inline-block !important;
}
}
@media (max-width: 767px) {
.hidden-xs {
display: none !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
.hidden-sm {
display: none !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
.hidden-md {
display: none !important;
}
}
@media (min-width: 1200px) {
.hidden-lg {
display: none !important;
}
}
.visible-print {
display: none !important;
}
@media print {
.visible-print {
display: block !important;
}
table.visible-print {
display: table !important;
}
tr.visible-print {
display: table-row !important;
}
th.visible-print,
td.visible-print {
display: table-cell !important;
}
}
.visible-print-block {
display: none !important;
}
@media print {
.visible-print-block {
display: block !important;
}
}
.visible-print-inline {
display: none !important;
}
@media print {
.visible-print-inline {
display: inline !important;
}
}
.visible-print-inline-block {
display: none !important;
}
@media print {
.visible-print-inline-block {
display: inline-block !important;
}
}
@media print {
.hidden-print {
display: none !important;
}
}
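/* Usage sketch (hypothetical markup, not part of this stylesheet): the
 * visibility utilities above toggle elements per breakpoint or for print:
 *
 *   <div class="visible-xs">shown only below 768px</div>
 *   <div class="hidden-print">hidden when printing</div>
 */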
/*!
*
* Font Awesome
*
*/
/*!
* Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome
* License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
*/
/* FONT PATH
* -------------------------- */
@font-face {
font-family: 'FontAwesome';
src: url('../components/font-awesome/fonts/fontawesome-webfont.eot?v=4.2.0');
src: url('../components/font-awesome/fonts/fontawesome-webfont.eot?#iefix&v=4.2.0') format('embedded-opentype'), url('../components/font-awesome/fonts/fontawesome-webfont.woff?v=4.2.0') format('woff'), url('../components/font-awesome/fonts/fontawesome-webfont.ttf?v=4.2.0') format('truetype'), url('../components/font-awesome/fonts/fontawesome-webfont.svg?v=4.2.0#fontawesomeregular') format('svg');
font-weight: normal;
font-style: normal;
}
.fa {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
/* makes the font 33% larger relative to the icon container */
.fa-lg {
font-size: 1.33333333em;
line-height: 0.75em;
vertical-align: -15%;
}
.fa-2x {
font-size: 2em;
}
.fa-3x {
font-size: 3em;
}
.fa-4x {
font-size: 4em;
}
.fa-5x {
font-size: 5em;
}
.fa-fw {
width: 1.28571429em;
text-align: center;
}
.fa-ul {
padding-left: 0;
margin-left: 2.14285714em;
list-style-type: none;
}
.fa-ul > li {
position: relative;
}
.fa-li {
position: absolute;
left: -2.14285714em;
width: 2.14285714em;
top: 0.14285714em;
text-align: center;
}
.fa-li.fa-lg {
left: -1.85714286em;
}
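/* Usage sketch (hypothetical markup): .fa-ul/.fa-li replace default list
 * bullets with icons, using the absolute positioning defined above:
 *
 *   <ul class="fa-ul">
 *     <li><i class="fa fa-li fa-check"></i> Done</li>
 *     <li><i class="fa fa-li fa-square-o"></i> Pending</li>
 *   </ul>
 */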
.fa-border {
padding: .2em .25em .15em;
border: solid 0.08em #eee;
border-radius: .1em;
}
.pull-right {
float: right;
}
.pull-left {
float: left;
}
.fa.pull-left {
margin-right: .3em;
}
.fa.pull-right {
margin-left: .3em;
}
.fa-spin {
-webkit-animation: fa-spin 2s infinite linear;
animation: fa-spin 2s infinite linear;
}
@-webkit-keyframes fa-spin {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@keyframes fa-spin {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
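/* Usage sketch (hypothetical markup): pairing .fa-spin with any icon class
 * applies the 2s linear rotation defined above, e.g. as a loading indicator:
 *
 *   <i class="fa fa-spinner fa-spin"></i>
 */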
.fa-rotate-90 {
filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=1);
-webkit-transform: rotate(90deg);
-ms-transform: rotate(90deg);
transform: rotate(90deg);
}
.fa-rotate-180 {
filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=2);
-webkit-transform: rotate(180deg);
-ms-transform: rotate(180deg);
transform: rotate(180deg);
}
.fa-rotate-270 {
filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=3);
-webkit-transform: rotate(270deg);
-ms-transform: rotate(270deg);
transform: rotate(270deg);
}
.fa-flip-horizontal {
filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1);
-webkit-transform: scale(-1, 1);
-ms-transform: scale(-1, 1);
transform: scale(-1, 1);
}
.fa-flip-vertical {
filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1);
-webkit-transform: scale(1, -1);
-ms-transform: scale(1, -1);
transform: scale(1, -1);
}
:root .fa-rotate-90,
:root .fa-rotate-180,
:root .fa-rotate-270,
:root .fa-flip-horizontal,
:root .fa-flip-vertical {
filter: none;
}
.fa-stack {
position: relative;
display: inline-block;
width: 2em;
height: 2em;
line-height: 2em;
vertical-align: middle;
}
.fa-stack-1x,
.fa-stack-2x {
position: absolute;
left: 0;
width: 100%;
text-align: center;
}
.fa-stack-1x {
line-height: inherit;
}
.fa-stack-2x {
font-size: 2em;
}
.fa-inverse {
color: #fff;
}
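/* Usage sketch (hypothetical markup): .fa-stack layers two icons; the
 * .fa-stack-2x icon forms the backdrop, .fa-stack-1x sits centered on top,
 * and .fa-inverse gives the top icon a contrasting color:
 *
 *   <span class="fa-stack fa-lg">
 *     <i class="fa fa-square fa-stack-2x"></i>
 *     <i class="fa fa-terminal fa-stack-1x fa-inverse"></i>
 *   </span>
 */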
/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen
readers do not read off random characters that represent icons */
.fa-glass:before {
content: "\f000";
}
.fa-music:before {
content: "\f001";
}
.fa-search:before {
content: "\f002";
}
.fa-envelope-o:before {
content: "\f003";
}
.fa-heart:before {
content: "\f004";
}
.fa-star:before {
content: "\f005";
}
.fa-star-o:before {
content: "\f006";
}
.fa-user:before {
content: "\f007";
}
.fa-film:before {
content: "\f008";
}
.fa-th-large:before {
content: "\f009";
}
.fa-th:before {
content: "\f00a";
}
.fa-th-list:before {
content: "\f00b";
}
.fa-check:before {
content: "\f00c";
}
.fa-remove:before,
.fa-close:before,
.fa-times:before {
content: "\f00d";
}
.fa-search-plus:before {
content: "\f00e";
}
.fa-search-minus:before {
content: "\f010";
}
.fa-power-off:before {
content: "\f011";
}
.fa-signal:before {
content: "\f012";
}
.fa-gear:before,
.fa-cog:before {
content: "\f013";
}
.fa-trash-o:before {
content: "\f014";
}
.fa-home:before {
content: "\f015";
}
.fa-file-o:before {
content: "\f016";
}
.fa-clock-o:before {
content: "\f017";
}
.fa-road:before {
content: "\f018";
}
.fa-download:before {
content: "\f019";
}
.fa-arrow-circle-o-down:before {
content: "\f01a";
}
.fa-arrow-circle-o-up:before {
content: "\f01b";
}
.fa-inbox:before {
content: "\f01c";
}
.fa-play-circle-o:before {
content: "\f01d";
}
.fa-rotate-right:before,
.fa-repeat:before {
content: "\f01e";
}
.fa-refresh:before {
content: "\f021";
}
.fa-list-alt:before {
content: "\f022";
}
.fa-lock:before {
content: "\f023";
}
.fa-flag:before {
content: "\f024";
}
.fa-headphones:before {
content: "\f025";
}
.fa-volume-off:before {
content: "\f026";
}
.fa-volume-down:before {
content: "\f027";
}
.fa-volume-up:before {
content: "\f028";
}
.fa-qrcode:before {
content: "\f029";
}
.fa-barcode:before {
content: "\f02a";
}
.fa-tag:before {
content: "\f02b";
}
.fa-tags:before {
content: "\f02c";
}
.fa-book:before {
content: "\f02d";
}
.fa-bookmark:before {
content: "\f02e";
}
.fa-print:before {
content: "\f02f";
}
.fa-camera:before {
content: "\f030";
}
.fa-font:before {
content: "\f031";
}
.fa-bold:before {
content: "\f032";
}
.fa-italic:before {
content: "\f033";
}
.fa-text-height:before {
content: "\f034";
}
.fa-text-width:before {
content: "\f035";
}
.fa-align-left:before {
content: "\f036";
}
.fa-align-center:before {
content: "\f037";
}
.fa-align-right:before {
content: "\f038";
}
.fa-align-justify:before {
content: "\f039";
}
.fa-list:before {
content: "\f03a";
}
.fa-dedent:before,
.fa-outdent:before {
content: "\f03b";
}
.fa-indent:before {
content: "\f03c";
}
.fa-video-camera:before {
content: "\f03d";
}
.fa-photo:before,
.fa-image:before,
.fa-picture-o:before {
content: "\f03e";
}
.fa-pencil:before {
content: "\f040";
}
.fa-map-marker:before {
content: "\f041";
}
.fa-adjust:before {
content: "\f042";
}
.fa-tint:before {
content: "\f043";
}
.fa-edit:before,
.fa-pencil-square-o:before {
content: "\f044";
}
.fa-share-square-o:before {
content: "\f045";
}
.fa-check-square-o:before {
content: "\f046";
}
.fa-arrows:before {
content: "\f047";
}
.fa-step-backward:before {
content: "\f048";
}
.fa-fast-backward:before {
content: "\f049";
}
.fa-backward:before {
content: "\f04a";
}
.fa-play:before {
content: "\f04b";
}
.fa-pause:before {
content: "\f04c";
}
.fa-stop:before {
content: "\f04d";
}
.fa-forward:before {
content: "\f04e";
}
.fa-fast-forward:before {
content: "\f050";
}
.fa-step-forward:before {
content: "\f051";
}
.fa-eject:before {
content: "\f052";
}
.fa-chevron-left:before {
content: "\f053";
}
.fa-chevron-right:before {
content: "\f054";
}
.fa-plus-circle:before {
content: "\f055";
}
.fa-minus-circle:before {
content: "\f056";
}
.fa-times-circle:before {
content: "\f057";
}
.fa-check-circle:before {
content: "\f058";
}
.fa-question-circle:before {
content: "\f059";
}
.fa-info-circle:before {
content: "\f05a";
}
.fa-crosshairs:before {
content: "\f05b";
}
.fa-times-circle-o:before {
content: "\f05c";
}
.fa-check-circle-o:before {
content: "\f05d";
}
.fa-ban:before {
content: "\f05e";
}
.fa-arrow-left:before {
content: "\f060";
}
.fa-arrow-right:before {
content: "\f061";
}
.fa-arrow-up:before {
content: "\f062";
}
.fa-arrow-down:before {
content: "\f063";
}
.fa-mail-forward:before,
.fa-share:before {
content: "\f064";
}
.fa-expand:before {
content: "\f065";
}
.fa-compress:before {
content: "\f066";
}
.fa-plus:before {
content: "\f067";
}
.fa-minus:before {
content: "\f068";
}
.fa-asterisk:before {
content: "\f069";
}
.fa-exclamation-circle:before {
content: "\f06a";
}
.fa-gift:before {
content: "\f06b";
}
.fa-leaf:before {
content: "\f06c";
}
.fa-fire:before {
content: "\f06d";
}
.fa-eye:before {
content: "\f06e";
}
.fa-eye-slash:before {
content: "\f070";
}
.fa-warning:before,
.fa-exclamation-triangle:before {
content: "\f071";
}
.fa-plane:before {
content: "\f072";
}
.fa-calendar:before {
content: "\f073";
}
.fa-random:before {
content: "\f074";
}
.fa-comment:before {
content: "\f075";
}
.fa-magnet:before {
content: "\f076";
}
.fa-chevron-up:before {
content: "\f077";
}
.fa-chevron-down:before {
content: "\f078";
}
.fa-retweet:before {
content: "\f079";
}
.fa-shopping-cart:before {
content: "\f07a";
}
.fa-folder:before {
content: "\f07b";
}
.fa-folder-open:before {
content: "\f07c";
}
.fa-arrows-v:before {
content: "\f07d";
}
.fa-arrows-h:before {
content: "\f07e";
}
.fa-bar-chart-o:before,
.fa-bar-chart:before {
content: "\f080";
}
.fa-twitter-square:before {
content: "\f081";
}
.fa-facebook-square:before {
content: "\f082";
}
.fa-camera-retro:before {
content: "\f083";
}
.fa-key:before {
content: "\f084";
}
.fa-gears:before,
.fa-cogs:before {
content: "\f085";
}
.fa-comments:before {
content: "\f086";
}
.fa-thumbs-o-up:before {
content: "\f087";
}
.fa-thumbs-o-down:before {
content: "\f088";
}
.fa-star-half:before {
content: "\f089";
}
.fa-heart-o:before {
content: "\f08a";
}
.fa-sign-out:before {
content: "\f08b";
}
.fa-linkedin-square:before {
content: "\f08c";
}
.fa-thumb-tack:before {
content: "\f08d";
}
.fa-external-link:before {
content: "\f08e";
}
.fa-sign-in:before {
content: "\f090";
}
.fa-trophy:before {
content: "\f091";
}
.fa-github-square:before {
content: "\f092";
}
.fa-upload:before {
content: "\f093";
}
.fa-lemon-o:before {
content: "\f094";
}
.fa-phone:before {
content: "\f095";
}
.fa-square-o:before {
content: "\f096";
}
.fa-bookmark-o:before {
content: "\f097";
}
.fa-phone-square:before {
content: "\f098";
}
.fa-twitter:before {
content: "\f099";
}
.fa-facebook:before {
content: "\f09a";
}
.fa-github:before {
content: "\f09b";
}
.fa-unlock:before {
content: "\f09c";
}
.fa-credit-card:before {
content: "\f09d";
}
.fa-rss:before {
content: "\f09e";
}
.fa-hdd-o:before {
content: "\f0a0";
}
.fa-bullhorn:before {
content: "\f0a1";
}
.fa-bell:before {
content: "\f0f3";
}
.fa-certificate:before {
content: "\f0a3";
}
.fa-hand-o-right:before {
content: "\f0a4";
}
.fa-hand-o-left:before {
content: "\f0a5";
}
.fa-hand-o-up:before {
content: "\f0a6";
}
.fa-hand-o-down:before {
content: "\f0a7";
}
.fa-arrow-circle-left:before {
content: "\f0a8";
}
.fa-arrow-circle-right:before {
content: "\f0a9";
}
.fa-arrow-circle-up:before {
content: "\f0aa";
}
.fa-arrow-circle-down:before {
content: "\f0ab";
}
.fa-globe:before {
content: "\f0ac";
}
.fa-wrench:before {
content: "\f0ad";
}
.fa-tasks:before {
content: "\f0ae";
}
.fa-filter:before {
content: "\f0b0";
}
.fa-briefcase:before {
content: "\f0b1";
}
.fa-arrows-alt:before {
content: "\f0b2";
}
.fa-group:before,
.fa-users:before {
content: "\f0c0";
}
.fa-chain:before,
.fa-link:before {
content: "\f0c1";
}
.fa-cloud:before {
content: "\f0c2";
}
.fa-flask:before {
content: "\f0c3";
}
.fa-cut:before,
.fa-scissors:before {
content: "\f0c4";
}
.fa-copy:before,
.fa-files-o:before {
content: "\f0c5";
}
.fa-paperclip:before {
content: "\f0c6";
}
.fa-save:before,
.fa-floppy-o:before {
content: "\f0c7";
}
.fa-square:before {
content: "\f0c8";
}
.fa-navicon:before,
.fa-reorder:before,
.fa-bars:before {
content: "\f0c9";
}
.fa-list-ul:before {
content: "\f0ca";
}
.fa-list-ol:before {
content: "\f0cb";
}
.fa-strikethrough:before {
content: "\f0cc";
}
.fa-underline:before {
content: "\f0cd";
}
.fa-table:before {
content: "\f0ce";
}
.fa-magic:before {
content: "\f0d0";
}
.fa-truck:before {
content: "\f0d1";
}
.fa-pinterest:before {
content: "\f0d2";
}
.fa-pinterest-square:before {
content: "\f0d3";
}
.fa-google-plus-square:before {
content: "\f0d4";
}
.fa-google-plus:before {
content: "\f0d5";
}
.fa-money:before {
content: "\f0d6";
}
.fa-caret-down:before {
content: "\f0d7";
}
.fa-caret-up:before {
content: "\f0d8";
}
.fa-caret-left:before {
content: "\f0d9";
}
.fa-caret-right:before {
content: "\f0da";
}
.fa-columns:before {
content: "\f0db";
}
.fa-unsorted:before,
.fa-sort:before {
content: "\f0dc";
}
.fa-sort-down:before,
.fa-sort-desc:before {
content: "\f0dd";
}
.fa-sort-up:before,
.fa-sort-asc:before {
content: "\f0de";
}
.fa-envelope:before {
content: "\f0e0";
}
.fa-linkedin:before {
content: "\f0e1";
}
.fa-rotate-left:before,
.fa-undo:before {
content: "\f0e2";
}
.fa-legal:before,
.fa-gavel:before {
content: "\f0e3";
}
.fa-dashboard:before,
.fa-tachometer:before {
content: "\f0e4";
}
.fa-comment-o:before {
content: "\f0e5";
}
.fa-comments-o:before {
content: "\f0e6";
}
.fa-flash:before,
.fa-bolt:before {
content: "\f0e7";
}
.fa-sitemap:before {
content: "\f0e8";
}
.fa-umbrella:before {
content: "\f0e9";
}
.fa-paste:before,
.fa-clipboard:before {
content: "\f0ea";
}
.fa-lightbulb-o:before {
content: "\f0eb";
}
.fa-exchange:before {
content: "\f0ec";
}
.fa-cloud-download:before {
content: "\f0ed";
}
.fa-cloud-upload:before {
content: "\f0ee";
}
.fa-user-md:before {
content: "\f0f0";
}
.fa-stethoscope:before {
content: "\f0f1";
}
.fa-suitcase:before {
content: "\f0f2";
}
.fa-bell-o:before {
content: "\f0a2";
}
.fa-coffee:before {
content: "\f0f4";
}
.fa-cutlery:before {
content: "\f0f5";
}
.fa-file-text-o:before {
content: "\f0f6";
}
.fa-building-o:before {
content: "\f0f7";
}
.fa-hospital-o:before {
content: "\f0f8";
}
.fa-ambulance:before {
content: "\f0f9";
}
.fa-medkit:before {
content: "\f0fa";
}
.fa-fighter-jet:before {
content: "\f0fb";
}
.fa-beer:before {
content: "\f0fc";
}
.fa-h-square:before {
content: "\f0fd";
}
.fa-plus-square:before {
content: "\f0fe";
}
.fa-angle-double-left:before {
content: "\f100";
}
.fa-angle-double-right:before {
content: "\f101";
}
.fa-angle-double-up:before {
content: "\f102";
}
.fa-angle-double-down:before {
content: "\f103";
}
.fa-angle-left:before {
content: "\f104";
}
.fa-angle-right:before {
content: "\f105";
}
.fa-angle-up:before {
content: "\f106";
}
.fa-angle-down:before {
content: "\f107";
}
.fa-desktop:before {
content: "\f108";
}
.fa-laptop:before {
content: "\f109";
}
.fa-tablet:before {
content: "\f10a";
}
.fa-mobile-phone:before,
.fa-mobile:before {
content: "\f10b";
}
.fa-circle-o:before {
content: "\f10c";
}
.fa-quote-left:before {
content: "\f10d";
}
.fa-quote-right:before {
content: "\f10e";
}
.fa-spinner:before {
content: "\f110";
}
.fa-circle:before {
content: "\f111";
}
.fa-mail-reply:before,
.fa-reply:before {
content: "\f112";
}
.fa-github-alt:before {
content: "\f113";
}
.fa-folder-o:before {
content: "\f114";
}
.fa-folder-open-o:before {
content: "\f115";
}
.fa-smile-o:before {
content: "\f118";
}
.fa-frown-o:before {
content: "\f119";
}
.fa-meh-o:before {
content: "\f11a";
}
.fa-gamepad:before {
content: "\f11b";
}
.fa-keyboard-o:before {
content: "\f11c";
}
.fa-flag-o:before {
content: "\f11d";
}
.fa-flag-checkered:before {
content: "\f11e";
}
.fa-terminal:before {
content: "\f120";
}
.fa-code:before {
content: "\f121";
}
.fa-mail-reply-all:before,
.fa-reply-all:before {
content: "\f122";
}
.fa-star-half-empty:before,
.fa-star-half-full:before,
.fa-star-half-o:before {
content: "\f123";
}
.fa-location-arrow:before {
content: "\f124";
}
.fa-crop:before {
content: "\f125";
}
.fa-code-fork:before {
content: "\f126";
}
.fa-unlink:before,
.fa-chain-broken:before {
content: "\f127";
}
.fa-question:before {
content: "\f128";
}
.fa-info:before {
content: "\f129";
}
.fa-exclamation:before {
content: "\f12a";
}
.fa-superscript:before {
content: "\f12b";
}
.fa-subscript:before {
content: "\f12c";
}
.fa-eraser:before {
content: "\f12d";
}
.fa-puzzle-piece:before {
content: "\f12e";
}
.fa-microphone:before {
content: "\f130";
}
.fa-microphone-slash:before {
content: "\f131";
}
.fa-shield:before {
content: "\f132";
}
.fa-calendar-o:before {
content: "\f133";
}
.fa-fire-extinguisher:before {
content: "\f134";
}
.fa-rocket:before {
content: "\f135";
}
.fa-maxcdn:before {
content: "\f136";
}
.fa-chevron-circle-left:before {
content: "\f137";
}
.fa-chevron-circle-right:before {
content: "\f138";
}
.fa-chevron-circle-up:before {
content: "\f139";
}
.fa-chevron-circle-down:before {
content: "\f13a";
}
.fa-html5:before {
content: "\f13b";
}
.fa-css3:before {
content: "\f13c";
}
.fa-anchor:before {
content: "\f13d";
}
.fa-unlock-alt:before {
content: "\f13e";
}
.fa-bullseye:before {
content: "\f140";
}
.fa-ellipsis-h:before {
content: "\f141";
}
.fa-ellipsis-v:before {
content: "\f142";
}
.fa-rss-square:before {
content: "\f143";
}
.fa-play-circle:before {
content: "\f144";
}
.fa-ticket:before {
content: "\f145";
}
.fa-minus-square:before {
content: "\f146";
}
.fa-minus-square-o:before {
content: "\f147";
}
.fa-level-up:before {
content: "\f148";
}
.fa-level-down:before {
content: "\f149";
}
.fa-check-square:before {
content: "\f14a";
}
.fa-pencil-square:before {
content: "\f14b";
}
.fa-external-link-square:before {
content: "\f14c";
}
.fa-share-square:before {
content: "\f14d";
}
.fa-compass:before {
content: "\f14e";
}
.fa-toggle-down:before,
.fa-caret-square-o-down:before {
content: "\f150";
}
.fa-toggle-up:before,
.fa-caret-square-o-up:before {
content: "\f151";
}
.fa-toggle-right:before,
.fa-caret-square-o-right:before {
content: "\f152";
}
.fa-euro:before,
.fa-eur:before {
content: "\f153";
}
.fa-gbp:before {
content: "\f154";
}
.fa-dollar:before,
.fa-usd:before {
content: "\f155";
}
.fa-rupee:before,
.fa-inr:before {
content: "\f156";
}
.fa-cny:before,
.fa-rmb:before,
.fa-yen:before,
.fa-jpy:before {
content: "\f157";
}
.fa-ruble:before,
.fa-rouble:before,
.fa-rub:before {
content: "\f158";
}
.fa-won:before,
.fa-krw:before {
content: "\f159";
}
.fa-bitcoin:before,
.fa-btc:before {
content: "\f15a";
}
.fa-file:before {
content: "\f15b";
}
.fa-file-text:before {
content: "\f15c";
}
.fa-sort-alpha-asc:before {
content: "\f15d";
}
.fa-sort-alpha-desc:before {
content: "\f15e";
}
.fa-sort-amount-asc:before {
content: "\f160";
}
.fa-sort-amount-desc:before {
content: "\f161";
}
.fa-sort-numeric-asc:before {
content: "\f162";
}
.fa-sort-numeric-desc:before {
content: "\f163";
}
.fa-thumbs-up:before {
content: "\f164";
}
.fa-thumbs-down:before {
content: "\f165";
}
.fa-youtube-square:before {
content: "\f166";
}
.fa-youtube:before {
content: "\f167";
}
.fa-xing:before {
content: "\f168";
}
.fa-xing-square:before {
content: "\f169";
}
.fa-youtube-play:before {
content: "\f16a";
}
.fa-dropbox:before {
content: "\f16b";
}
.fa-stack-overflow:before {
content: "\f16c";
}
.fa-instagram:before {
content: "\f16d";
}
.fa-flickr:before {
content: "\f16e";
}
.fa-adn:before {
content: "\f170";
}
.fa-bitbucket:before {
content: "\f171";
}
.fa-bitbucket-square:before {
content: "\f172";
}
.fa-tumblr:before {
content: "\f173";
}
.fa-tumblr-square:before {
content: "\f174";
}
.fa-long-arrow-down:before {
content: "\f175";
}
.fa-long-arrow-up:before {
content: "\f176";
}
.fa-long-arrow-left:before {
content: "\f177";
}
.fa-long-arrow-right:before {
content: "\f178";
}
.fa-apple:before {
content: "\f179";
}
.fa-windows:before {
content: "\f17a";
}
.fa-android:before {
content: "\f17b";
}
.fa-linux:before {
content: "\f17c";
}
.fa-dribbble:before {
content: "\f17d";
}
.fa-skype:before {
content: "\f17e";
}
.fa-foursquare:before {
content: "\f180";
}
.fa-trello:before {
content: "\f181";
}
.fa-female:before {
content: "\f182";
}
.fa-male:before {
content: "\f183";
}
.fa-gittip:before {
content: "\f184";
}
.fa-sun-o:before {
content: "\f185";
}
.fa-moon-o:before {
content: "\f186";
}
.fa-archive:before {
content: "\f187";
}
.fa-bug:before {
content: "\f188";
}
.fa-vk:before {
content: "\f189";
}
.fa-weibo:before {
content: "\f18a";
}
.fa-renren:before {
content: "\f18b";
}
.fa-pagelines:before {
content: "\f18c";
}
.fa-stack-exchange:before {
content: "\f18d";
}
.fa-arrow-circle-o-right:before {
content: "\f18e";
}
.fa-arrow-circle-o-left:before {
content: "\f190";
}
.fa-toggle-left:before,
.fa-caret-square-o-left:before {
content: "\f191";
}
.fa-dot-circle-o:before {
content: "\f192";
}
.fa-wheelchair:before {
content: "\f193";
}
.fa-vimeo-square:before {
content: "\f194";
}
.fa-turkish-lira:before,
.fa-try:before {
content: "\f195";
}
.fa-plus-square-o:before {
content: "\f196";
}
.fa-space-shuttle:before {
content: "\f197";
}
.fa-slack:before {
content: "\f198";
}
.fa-envelope-square:before {
content: "\f199";
}
.fa-wordpress:before {
content: "\f19a";
}
.fa-openid:before {
content: "\f19b";
}
.fa-institution:before,
.fa-bank:before,
.fa-university:before {
content: "\f19c";
}
.fa-mortar-board:before,
.fa-graduation-cap:before {
content: "\f19d";
}
.fa-yahoo:before {
content: "\f19e";
}
.fa-google:before {
content: "\f1a0";
}
.fa-reddit:before {
content: "\f1a1";
}
.fa-reddit-square:before {
content: "\f1a2";
}
.fa-stumbleupon-circle:before {
content: "\f1a3";
}
.fa-stumbleupon:before {
content: "\f1a4";
}
.fa-delicious:before {
content: "\f1a5";
}
.fa-digg:before {
content: "\f1a6";
}
.fa-pied-piper:before {
content: "\f1a7";
}
.fa-pied-piper-alt:before {
content: "\f1a8";
}
.fa-drupal:before {
content: "\f1a9";
}
.fa-joomla:before {
content: "\f1aa";
}
.fa-language:before {
content: "\f1ab";
}
.fa-fax:before {
content: "\f1ac";
}
.fa-building:before {
content: "\f1ad";
}
.fa-child:before {
content: "\f1ae";
}
.fa-paw:before {
content: "\f1b0";
}
.fa-spoon:before {
content: "\f1b1";
}
.fa-cube:before {
content: "\f1b2";
}
.fa-cubes:before {
content: "\f1b3";
}
.fa-behance:before {
content: "\f1b4";
}
.fa-behance-square:before {
content: "\f1b5";
}
.fa-steam:before {
content: "\f1b6";
}
.fa-steam-square:before {
content: "\f1b7";
}
.fa-recycle:before {
content: "\f1b8";
}
.fa-automobile:before,
.fa-car:before {
content: "\f1b9";
}
.fa-cab:before,
.fa-taxi:before {
content: "\f1ba";
}
.fa-tree:before {
content: "\f1bb";
}
.fa-spotify:before {
content: "\f1bc";
}
.fa-deviantart:before {
content: "\f1bd";
}
.fa-soundcloud:before {
content: "\f1be";
}
.fa-database:before {
content: "\f1c0";
}
.fa-file-pdf-o:before {
content: "\f1c1";
}
.fa-file-word-o:before {
content: "\f1c2";
}
.fa-file-excel-o:before {
content: "\f1c3";
}
.fa-file-powerpoint-o:before {
content: "\f1c4";
}
.fa-file-photo-o:before,
.fa-file-picture-o:before,
.fa-file-image-o:before {
content: "\f1c5";
}
.fa-file-zip-o:before,
.fa-file-archive-o:before {
content: "\f1c6";
}
.fa-file-sound-o:before,
.fa-file-audio-o:before {
content: "\f1c7";
}
.fa-file-movie-o:before,
.fa-file-video-o:before {
content: "\f1c8";
}
.fa-file-code-o:before {
content: "\f1c9";
}
.fa-vine:before {
content: "\f1ca";
}
.fa-codepen:before {
content: "\f1cb";
}
.fa-jsfiddle:before {
content: "\f1cc";
}
.fa-life-bouy:before,
.fa-life-buoy:before,
.fa-life-saver:before,
.fa-support:before,
.fa-life-ring:before {
content: "\f1cd";
}
.fa-circle-o-notch:before {
content: "\f1ce";
}
.fa-ra:before,
.fa-rebel:before {
content: "\f1d0";
}
.fa-ge:before,
.fa-empire:before {
content: "\f1d1";
}
.fa-git-square:before {
content: "\f1d2";
}
.fa-git:before {
content: "\f1d3";
}
.fa-hacker-news:before {
content: "\f1d4";
}
.fa-tencent-weibo:before {
content: "\f1d5";
}
.fa-qq:before {
content: "\f1d6";
}
.fa-wechat:before,
.fa-weixin:before {
content: "\f1d7";
}
.fa-send:before,
.fa-paper-plane:before {
content: "\f1d8";
}
.fa-send-o:before,
.fa-paper-plane-o:before {
content: "\f1d9";
}
.fa-history:before {
content: "\f1da";
}
.fa-circle-thin:before {
content: "\f1db";
}
.fa-header:before {
content: "\f1dc";
}
.fa-paragraph:before {
content: "\f1dd";
}
.fa-sliders:before {
content: "\f1de";
}
.fa-share-alt:before {
content: "\f1e0";
}
.fa-share-alt-square:before {
content: "\f1e1";
}
.fa-bomb:before {
content: "\f1e2";
}
.fa-soccer-ball-o:before,
.fa-futbol-o:before {
content: "\f1e3";
}
.fa-tty:before {
content: "\f1e4";
}
.fa-binoculars:before {
content: "\f1e5";
}
.fa-plug:before {
content: "\f1e6";
}
.fa-slideshare:before {
content: "\f1e7";
}
.fa-twitch:before {
content: "\f1e8";
}
.fa-yelp:before {
content: "\f1e9";
}
.fa-newspaper-o:before {
content: "\f1ea";
}
.fa-wifi:before {
content: "\f1eb";
}
.fa-calculator:before {
content: "\f1ec";
}
.fa-paypal:before {
content: "\f1ed";
}
.fa-google-wallet:before {
content: "\f1ee";
}
.fa-cc-visa:before {
content: "\f1f0";
}
.fa-cc-mastercard:before {
content: "\f1f1";
}
.fa-cc-discover:before {
content: "\f1f2";
}
.fa-cc-amex:before {
content: "\f1f3";
}
.fa-cc-paypal:before {
content: "\f1f4";
}
.fa-cc-stripe:before {
content: "\f1f5";
}
.fa-bell-slash:before {
content: "\f1f6";
}
.fa-bell-slash-o:before {
content: "\f1f7";
}
.fa-trash:before {
content: "\f1f8";
}
.fa-copyright:before {
content: "\f1f9";
}
.fa-at:before {
content: "\f1fa";
}
.fa-eyedropper:before {
content: "\f1fb";
}
.fa-paint-brush:before {
content: "\f1fc";
}
.fa-birthday-cake:before {
content: "\f1fd";
}
.fa-area-chart:before {
content: "\f1fe";
}
.fa-pie-chart:before {
content: "\f200";
}
.fa-line-chart:before {
content: "\f201";
}
.fa-lastfm:before {
content: "\f202";
}
.fa-lastfm-square:before {
content: "\f203";
}
.fa-toggle-off:before {
content: "\f204";
}
.fa-toggle-on:before {
content: "\f205";
}
.fa-bicycle:before {
content: "\f206";
}
.fa-bus:before {
content: "\f207";
}
.fa-ioxhost:before {
content: "\f208";
}
.fa-angellist:before {
content: "\f209";
}
.fa-cc:before {
content: "\f20a";
}
.fa-shekel:before,
.fa-sheqel:before,
.fa-ils:before {
content: "\f20b";
}
.fa-meanpath:before {
content: "\f20c";
}
/*!
*
* IPython base
*
*/
.modal.fade .modal-dialog {
-webkit-transform: translate(0, 0);
-ms-transform: translate(0, 0);
-o-transform: translate(0, 0);
transform: translate(0, 0);
}
code {
color: #000;
}
pre {
font-size: inherit;
line-height: inherit;
}
label {
font-weight: normal;
}
/* Make the page background at least 100% the height of the viewport */
/* Make the page itself at least 70% the height of the viewport */
.border-box-sizing {
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
.corner-all {
border-radius: 2px;
}
.no-padding {
padding: 0px;
}
/* Flexible box model classes */
/* Taken from Alex Russell http://infrequently.org/2009/08/css-3-progress/ */
/* This file is a compatibility layer. It allows the usage of flexible box
model layouts across multiple browsers, including older browsers. The newest,
universal implementation of the flexible box model is used when available (see
`Modern browsers` comments below). Browsers that are known to implement this
new spec completely include:
Firefox 28.0+
Chrome 29.0+
Internet Explorer 11+
Opera 17.0+
Browsers not listed, including Safari, are supported via the styling under the
`Old browsers` comments below.
*/
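/* Usage sketch (hypothetical markup): .hbox/.vbox create row/column flex
 * containers, and the .box-flex* classes below control how children share
 * the remaining space:
 *
 *   <div class="hbox">
 *     <div class="box-flex1">sidebar</div>
 *     <div class="box-flex2">main area (twice the sidebar's share)</div>
 *   </div>
 */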
.hbox {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
}
.hbox > * {
/* Old browsers */
-webkit-box-flex: 0;
-moz-box-flex: 0;
box-flex: 0;
/* Modern browsers */
flex: none;
}
.vbox {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
}
.vbox > * {
/* Old browsers */
-webkit-box-flex: 0;
-moz-box-flex: 0;
box-flex: 0;
/* Modern browsers */
flex: none;
}
.hbox.reverse,
.vbox.reverse,
.reverse {
/* Old browsers */
-webkit-box-direction: reverse;
-moz-box-direction: reverse;
box-direction: reverse;
/* Modern browsers */
flex-direction: row-reverse;
}
.hbox.box-flex0,
.vbox.box-flex0,
.box-flex0 {
/* Old browsers */
-webkit-box-flex: 0;
-moz-box-flex: 0;
box-flex: 0;
/* Modern browsers */
flex: none;
width: auto;
}
.hbox.box-flex1,
.vbox.box-flex1,
.box-flex1 {
/* Old browsers */
-webkit-box-flex: 1;
-moz-box-flex: 1;
box-flex: 1;
/* Modern browsers */
flex: 1;
}
.hbox.box-flex,
.vbox.box-flex,
.box-flex {
/* Old browsers */
/* Old browsers */
-webkit-box-flex: 1;
-moz-box-flex: 1;
box-flex: 1;
/* Modern browsers */
flex: 1;
}
.hbox.box-flex2,
.vbox.box-flex2,
.box-flex2 {
/* Old browsers */
-webkit-box-flex: 2;
-moz-box-flex: 2;
box-flex: 2;
/* Modern browsers */
flex: 2;
}
.box-group1 {
/* Deprecated */
-webkit-box-flex-group: 1;
-moz-box-flex-group: 1;
box-flex-group: 1;
}
.box-group2 {
/* Deprecated */
-webkit-box-flex-group: 2;
-moz-box-flex-group: 2;
box-flex-group: 2;
}
.hbox.start,
.vbox.start,
.start {
/* Old browsers */
-webkit-box-pack: start;
-moz-box-pack: start;
box-pack: start;
/* Modern browsers */
justify-content: flex-start;
}
.hbox.end,
.vbox.end,
.end {
/* Old browsers */
-webkit-box-pack: end;
-moz-box-pack: end;
box-pack: end;
/* Modern browsers */
justify-content: flex-end;
}
.hbox.center,
.vbox.center,
.center {
/* Old browsers */
-webkit-box-pack: center;
-moz-box-pack: center;
box-pack: center;
/* Modern browsers */
justify-content: center;
}
.hbox.baseline,
.vbox.baseline,
.baseline {
/* Old browsers */
-webkit-box-pack: baseline;
-moz-box-pack: baseline;
box-pack: baseline;
/* Modern browsers */
justify-content: baseline;
}
.hbox.stretch,
.vbox.stretch,
.stretch {
/* Old browsers */
-webkit-box-pack: stretch;
-moz-box-pack: stretch;
box-pack: stretch;
/* Modern browsers */
justify-content: stretch;
}
.hbox.align-start,
.vbox.align-start,
.align-start {
/* Old browsers */
-webkit-box-align: start;
-moz-box-align: start;
box-align: start;
/* Modern browsers */
align-items: flex-start;
}
.hbox.align-end,
.vbox.align-end,
.align-end {
/* Old browsers */
-webkit-box-align: end;
-moz-box-align: end;
box-align: end;
/* Modern browsers */
align-items: flex-end;
}
.hbox.align-center,
.vbox.align-center,
.align-center {
/* Old browsers */
-webkit-box-align: center;
-moz-box-align: center;
box-align: center;
/* Modern browsers */
align-items: center;
}
.hbox.align-baseline,
.vbox.align-baseline,
.align-baseline {
/* Old browsers */
-webkit-box-align: baseline;
-moz-box-align: baseline;
box-align: baseline;
/* Modern browsers */
align-items: baseline;
}
.hbox.align-stretch,
.vbox.align-stretch,
.align-stretch {
/* Old browsers */
-webkit-box-align: stretch;
-moz-box-align: stretch;
box-align: stretch;
/* Modern browsers */
align-items: stretch;
}
div.error {
margin: 2em;
text-align: center;
}
div.error > h1 {
font-size: 500%;
line-height: normal;
}
div.error > p {
font-size: 200%;
line-height: normal;
}
div.traceback-wrapper {
text-align: left;
max-width: 800px;
margin: auto;
}
/**
* Primary styles
*
* Author: Jupyter Development Team
*/
body {
background-color: #fff;
  /* This makes sure that the body covers the entire window; it needs to
     be on a different element than the display: box wrapper below */
position: absolute;
left: 0px;
right: 0px;
top: 0px;
bottom: 0px;
overflow: visible;
}
body > #header {
  /* Initially hidden to prevent a flash of unstyled content (FOUC) */
display: none;
background-color: #fff;
/* Display over codemirror */
position: relative;
z-index: 100;
}
body > #header #header-container {
padding-bottom: 5px;
padding-top: 5px;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
body > #header .header-bar {
width: 100%;
height: 1px;
background: #e7e7e7;
margin-bottom: -1px;
}
@media print {
body > #header {
display: none !important;
}
}
#header-spacer {
width: 100%;
visibility: hidden;
}
@media print {
#header-spacer {
display: none;
}
}
#ipython_notebook {
padding-left: 0px;
padding-top: 1px;
padding-bottom: 1px;
}
@media (max-width: 991px) {
#ipython_notebook {
margin-left: 10px;
}
}
[dir="rtl"] #ipython_notebook {
float: right !important;
}
#noscript {
width: auto;
padding-top: 16px;
padding-bottom: 16px;
text-align: center;
font-size: 22px;
color: red;
font-weight: bold;
}
#ipython_notebook img {
height: 28px;
}
#site {
width: 100%;
display: none;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
overflow: auto;
}
@media print {
#site {
height: auto !important;
}
}
/* Smaller buttons */
.ui-button .ui-button-text {
padding: 0.2em 0.8em;
font-size: 77%;
}
input.ui-button {
padding: 0.3em 0.9em;
}
span#login_widget {
float: right;
}
span#login_widget > .button,
#logout {
color: #333;
background-color: #fff;
border-color: #ccc;
}
span#login_widget > .button:focus,
#logout:focus,
span#login_widget > .button.focus,
#logout.focus {
color: #333;
background-color: #e6e6e6;
border-color: #8c8c8c;
}
span#login_widget > .button:hover,
#logout:hover {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
span#login_widget > .button:active,
#logout:active,
span#login_widget > .button.active,
#logout.active,
.open > .dropdown-togglespan#login_widget > .button,
.open > .dropdown-toggle#logout {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
span#login_widget > .button:active:hover,
#logout:active:hover,
span#login_widget > .button.active:hover,
#logout.active:hover,
.open > .dropdown-togglespan#login_widget > .button:hover,
.open > .dropdown-toggle#logout:hover,
span#login_widget > .button:active:focus,
#logout:active:focus,
span#login_widget > .button.active:focus,
#logout.active:focus,
.open > .dropdown-togglespan#login_widget > .button:focus,
.open > .dropdown-toggle#logout:focus,
span#login_widget > .button:active.focus,
#logout:active.focus,
span#login_widget > .button.active.focus,
#logout.active.focus,
.open > .dropdown-togglespan#login_widget > .button.focus,
.open > .dropdown-toggle#logout.focus {
color: #333;
background-color: #d4d4d4;
border-color: #8c8c8c;
}
span#login_widget > .button:active,
#logout:active,
span#login_widget > .button.active,
#logout.active,
.open > .dropdown-togglespan#login_widget > .button,
.open > .dropdown-toggle#logout {
background-image: none;
}
span#login_widget > .button.disabled:hover,
#logout.disabled:hover,
span#login_widget > .button[disabled]:hover,
#logout[disabled]:hover,
fieldset[disabled] span#login_widget > .button:hover,
fieldset[disabled] #logout:hover,
span#login_widget > .button.disabled:focus,
#logout.disabled:focus,
span#login_widget > .button[disabled]:focus,
#logout[disabled]:focus,
fieldset[disabled] span#login_widget > .button:focus,
fieldset[disabled] #logout:focus,
span#login_widget > .button.disabled.focus,
#logout.disabled.focus,
span#login_widget > .button[disabled].focus,
#logout[disabled].focus,
fieldset[disabled] span#login_widget > .button.focus,
fieldset[disabled] #logout.focus {
background-color: #fff;
border-color: #ccc;
}
span#login_widget > .button .badge,
#logout .badge {
color: #fff;
background-color: #333;
}
.nav-header {
text-transform: none;
}
#header > span {
margin-top: 10px;
}
.modal_stretch .modal-dialog {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
min-height: 80vh;
}
.modal_stretch .modal-dialog .modal-body {
max-height: calc(100vh - 200px);
overflow: auto;
flex: 1;
}
@media (min-width: 768px) {
.modal .modal-dialog {
width: 700px;
}
}
@media (min-width: 768px) {
select.form-control {
margin-left: 12px;
margin-right: 12px;
}
}
/*!
*
* IPython auth
*
*/
.center-nav {
display: inline-block;
margin-bottom: -4px;
}
/*!
*
* IPython tree view
*
*/
/* We need an invisible input field on top of the sentence */
/* "Drag file onto the list ..." */
.alternate_upload {
  background-color: transparent;
display: inline;
}
.alternate_upload.form {
padding: 0;
margin: 0;
}
.alternate_upload input.fileinput {
text-align: center;
vertical-align: middle;
display: inline;
opacity: 0;
z-index: 2;
width: 12ex;
margin-right: -12ex;
}
.alternate_upload .btn-upload {
height: 22px;
}
/**
* Primary styles
*
* Author: Jupyter Development Team
*/
[dir="rtl"] #tabs li {
float: right;
}
ul#tabs {
margin-bottom: 4px;
}
[dir="rtl"] ul#tabs {
margin-right: 0px;
}
ul#tabs a {
padding-top: 6px;
padding-bottom: 4px;
}
ul.breadcrumb a:focus,
ul.breadcrumb a:hover {
text-decoration: none;
}
ul.breadcrumb i.icon-home {
font-size: 16px;
margin-right: 4px;
}
ul.breadcrumb span {
color: #5e5e5e;
}
.list_toolbar {
padding: 4px 0 4px 0;
vertical-align: middle;
}
.list_toolbar .tree-buttons {
padding-top: 1px;
}
[dir="rtl"] .list_toolbar .tree-buttons {
float: left !important;
}
[dir="rtl"] .list_toolbar .pull-right {
padding-top: 1px;
float: left !important;
}
[dir="rtl"] .list_toolbar .pull-left {
float: right !important;
}
.dynamic-buttons {
padding-top: 3px;
display: inline-block;
}
.list_toolbar [class*="span"] {
min-height: 24px;
}
.list_header {
font-weight: bold;
background-color: #EEE;
}
.list_placeholder {
font-weight: bold;
padding-top: 4px;
padding-bottom: 4px;
padding-left: 7px;
padding-right: 7px;
}
.list_container {
margin-top: 4px;
margin-bottom: 20px;
border: 1px solid #ddd;
border-radius: 2px;
}
.list_container > div {
border-bottom: 1px solid #ddd;
}
.list_container > div:hover .list-item {
background-color: red;
}
.list_container > div:last-child {
border: none;
}
.list_item:hover .list_item {
background-color: #ddd;
}
.list_item a {
text-decoration: none;
}
.list_item:hover {
background-color: #fafafa;
}
.list_header > div,
.list_item > div {
padding-top: 4px;
padding-bottom: 4px;
padding-left: 7px;
padding-right: 7px;
line-height: 22px;
}
.list_header > div input,
.list_item > div input {
margin-right: 7px;
margin-left: 14px;
vertical-align: baseline;
line-height: 22px;
position: relative;
top: -1px;
}
.list_header > div .item_link,
.list_item > div .item_link {
margin-left: -1px;
vertical-align: baseline;
line-height: 22px;
}
.new-file input[type=checkbox] {
visibility: hidden;
}
.item_name {
line-height: 22px;
height: 24px;
}
.item_icon {
font-size: 14px;
color: #5e5e5e;
margin-right: 7px;
margin-left: 7px;
line-height: 22px;
vertical-align: baseline;
}
.item_buttons {
line-height: 1em;
margin-left: -5px;
}
.item_buttons .btn,
.item_buttons .btn-group,
.item_buttons .input-group {
float: left;
}
.item_buttons > .btn,
.item_buttons > .btn-group,
.item_buttons > .input-group {
margin-left: 5px;
}
.item_buttons .btn {
min-width: 13ex;
}
.item_buttons .running-indicator {
padding-top: 4px;
color: #5cb85c;
}
.item_buttons .kernel-name {
padding-top: 4px;
color: #5bc0de;
margin-right: 7px;
float: left;
}
.toolbar_info {
height: 24px;
line-height: 24px;
}
.list_item input:not([type=checkbox]) {
padding-top: 3px;
padding-bottom: 3px;
height: 22px;
line-height: 14px;
margin: 0px;
}
.highlight_text {
color: blue;
}
#project_name {
display: inline-block;
padding-left: 7px;
margin-left: -2px;
}
#project_name > .breadcrumb {
padding: 0px;
margin-bottom: 0px;
background-color: transparent;
font-weight: bold;
}
#tree-selector {
padding-right: 0px;
}
[dir="rtl"] #tree-selector a {
float: right;
}
#button-select-all {
min-width: 50px;
}
#select-all {
margin-left: 7px;
margin-right: 2px;
}
.menu_icon {
margin-right: 2px;
}
.tab-content .row {
margin-left: 0px;
margin-right: 0px;
}
.folder_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f114";
}
.folder_icon.pull-left:before {
  margin-right: .3em;
}
.folder_icon.pull-right:before {
  margin-left: .3em;
}
.notebook_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f02d";
position: relative;
top: -1px;
}
.notebook_icon.pull-left:before {
  margin-right: .3em;
}
.notebook_icon.pull-right:before {
  margin-left: .3em;
}
.running_notebook_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f02d";
position: relative;
top: -1px;
color: #5cb85c;
}
.running_notebook_icon.pull-left:before {
  margin-right: .3em;
}
.running_notebook_icon.pull-right:before {
  margin-left: .3em;
}
.file_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f016";
position: relative;
top: -2px;
}
.file_icon.pull-left:before {
  margin-right: .3em;
}
.file_icon.pull-right:before {
  margin-left: .3em;
}
#notebook_toolbar .pull-right {
padding-top: 0px;
margin-right: -1px;
}
ul#new-menu {
left: auto;
right: 0;
}
[dir="rtl"] #new-menu {
text-align: right;
}
.kernel-menu-icon {
  padding-right: 12px;
  width: 24px;
}
.kernel-menu-icon:before {
content: "\f096";
}
.kernel-menu-icon-current:before {
content: "\f00c";
}
#tab_content {
padding-top: 20px;
}
#running .panel-group .panel {
margin-top: 3px;
margin-bottom: 1em;
}
#running .panel-group .panel .panel-heading {
background-color: #EEE;
padding-top: 4px;
padding-bottom: 4px;
padding-left: 7px;
padding-right: 7px;
line-height: 22px;
}
#running .panel-group .panel .panel-heading a:focus,
#running .panel-group .panel .panel-heading a:hover {
text-decoration: none;
}
#running .panel-group .panel .panel-body {
padding: 0px;
}
#running .panel-group .panel .panel-body .list_container {
margin-top: 0px;
margin-bottom: 0px;
border: 0px;
border-radius: 0px;
}
#running .panel-group .panel .panel-body .list_container .list_item {
border-bottom: 1px solid #ddd;
}
#running .panel-group .panel .panel-body .list_container .list_item:last-child {
border-bottom: 0px;
}
[dir="rtl"] #running .col-sm-8 {
float: right !important;
}
.delete-button {
display: none;
}
.duplicate-button {
display: none;
}
.rename-button {
display: none;
}
.shutdown-button {
display: none;
}
.dynamic-instructions {
display: inline-block;
padding-top: 4px;
}
/*!
*
* IPython text editor webapp
*
*/
.selected-keymap i.fa {
padding: 0px 5px;
}
.selected-keymap i.fa:before {
content: "\f00c";
}
#mode-menu {
overflow: auto;
max-height: 20em;
}
.edit_app #header {
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
}
.edit_app #menubar .navbar {
/* Use a negative 1 bottom margin, so the border overlaps the border of the
header */
margin-bottom: -1px;
}
.dirty-indicator {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
width: 20px;
}
.dirty-indicator.pull-left {
margin-right: .3em;
}
.dirty-indicator.pull-right {
margin-left: .3em;
}
.dirty-indicator-dirty {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
width: 20px;
}
.dirty-indicator-dirty.pull-left {
margin-right: .3em;
}
.dirty-indicator-dirty.pull-right {
margin-left: .3em;
}
.dirty-indicator-clean {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
width: 20px;
}
.dirty-indicator-clean.pull-left {
margin-right: .3em;
}
.dirty-indicator-clean.pull-right {
margin-left: .3em;
}
.dirty-indicator-clean:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f00c";
}
.dirty-indicator-clean.pull-left:before {
  margin-right: .3em;
}
.dirty-indicator-clean.pull-right:before {
  margin-left: .3em;
}
#filename {
font-size: 16pt;
display: table;
padding: 0px 5px;
}
#current-mode {
padding-left: 5px;
padding-right: 5px;
}
#texteditor-backdrop {
padding-top: 20px;
padding-bottom: 20px;
}
@media not print {
#texteditor-backdrop {
background-color: #EEE;
}
}
@media print {
#texteditor-backdrop #texteditor-container .CodeMirror-gutter,
#texteditor-backdrop #texteditor-container .CodeMirror-gutters {
background-color: #fff;
}
}
@media not print {
#texteditor-backdrop #texteditor-container .CodeMirror-gutter,
#texteditor-backdrop #texteditor-container .CodeMirror-gutters {
background-color: #fff;
}
}
@media not print {
#texteditor-backdrop #texteditor-container {
padding: 0px;
background-color: #fff;
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
}
}
/*!
*
* IPython notebook
*
*/
/* CSS font colors for translated ANSI colors. */
.ansibold {
font-weight: bold;
}
/* use dark versions for foreground, to improve visibility */
.ansiblack {
color: black;
}
.ansired {
color: darkred;
}
.ansigreen {
color: darkgreen;
}
.ansiyellow {
color: #c4a000;
}
.ansiblue {
color: darkblue;
}
.ansipurple {
color: darkviolet;
}
.ansicyan {
color: steelblue;
}
.ansigray {
color: gray;
}
/* and light for background, for the same reason */
.ansibgblack {
background-color: black;
}
.ansibgred {
background-color: red;
}
.ansibggreen {
background-color: green;
}
.ansibgyellow {
background-color: yellow;
}
.ansibgblue {
background-color: blue;
}
.ansibgpurple {
background-color: magenta;
}
.ansibgcyan {
background-color: cyan;
}
.ansibggray {
background-color: gray;
}
div.cell {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
border-radius: 2px;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
border-width: 1px;
border-style: solid;
border-color: transparent;
width: 100%;
padding: 5px;
/* This acts as a spacer between cells, that is outside the border */
margin: 0px;
outline: none;
border-left-width: 1px;
padding-left: 5px;
background: linear-gradient(to right, transparent -40px, transparent 1px, transparent 1px, transparent 100%);
}
div.cell.jupyter-soft-selected {
border-left-color: #90CAF9;
border-left-color: #E3F2FD;
border-left-width: 1px;
padding-left: 5px;
border-right-color: #E3F2FD;
border-right-width: 1px;
background: #E3F2FD;
}
@media print {
div.cell.jupyter-soft-selected {
border-color: transparent;
}
}
div.cell.selected {
border-color: #ababab;
border-left-width: 0px;
padding-left: 6px;
background: linear-gradient(to right, #42A5F5 -40px, #42A5F5 5px, transparent 5px, transparent 100%);
}
@media print {
div.cell.selected {
border-color: transparent;
}
}
div.cell.selected.jupyter-soft-selected {
border-left-width: 0;
padding-left: 6px;
background: linear-gradient(to right, #42A5F5 -40px, #42A5F5 7px, #E3F2FD 7px, #E3F2FD 100%);
}
.edit_mode div.cell.selected {
border-color: #66BB6A;
border-left-width: 0px;
padding-left: 6px;
background: linear-gradient(to right, #66BB6A -40px, #66BB6A 5px, transparent 5px, transparent 100%);
}
@media print {
.edit_mode div.cell.selected {
border-color: transparent;
}
}
.prompt {
/* This needs to be wide enough for 3 digit prompt numbers: In[100]: */
min-width: 14ex;
/* This padding is tuned to match the padding on the CodeMirror editor. */
padding: 0.4em;
margin: 0px;
font-family: monospace;
text-align: right;
  /* This has to match that of the CodeMirror class line-height below */
line-height: 1.21429em;
/* Don't highlight prompt number selection */
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
/* Use default cursor */
cursor: default;
}
@media (max-width: 540px) {
.prompt {
text-align: left;
}
}
div.inner_cell {
min-width: 0;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
/* Old browsers */
-webkit-box-flex: 1;
-moz-box-flex: 1;
box-flex: 1;
/* Modern browsers */
flex: 1;
}
/* input_area and input_prompt must match in top border and margin for alignment */
div.input_area {
border: 1px solid #cfcfcf;
border-radius: 2px;
background: #f7f7f7;
line-height: 1.21429em;
}
/* This is needed so that empty prompt areas can collapse to zero height when there
is no content in the output_subarea and the prompt. The main purpose of this is
to make sure that empty JavaScript output_subareas have no height. */
div.prompt:empty {
padding-top: 0;
padding-bottom: 0;
}
div.unrecognized_cell {
padding: 5px 5px 5px 0px;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
}
div.unrecognized_cell .inner_cell {
border-radius: 2px;
padding: 5px;
font-weight: bold;
color: red;
border: 1px solid #cfcfcf;
background: #eaeaea;
}
div.unrecognized_cell .inner_cell a {
color: inherit;
text-decoration: none;
}
div.unrecognized_cell .inner_cell a:hover {
color: inherit;
text-decoration: none;
}
@media (max-width: 540px) {
div.unrecognized_cell > div.prompt {
display: none;
}
}
div.code_cell {
/* avoid page breaking on code cells when printing */
}
@media print {
div.code_cell {
page-break-inside: avoid;
}
}
/* any special styling for code cells that are currently running goes here */
div.input {
page-break-inside: avoid;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
}
@media (max-width: 540px) {
div.input {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
}
}
/* input_area and input_prompt must match in top border and margin for alignment */
div.input_prompt {
color: #303F9F;
border-top: 1px solid transparent;
}
div.input_area > div.highlight {
margin: 0.4em;
border: none;
padding: 0px;
background-color: transparent;
}
div.input_area > div.highlight > pre {
margin: 0px;
border: none;
padding: 0px;
background-color: transparent;
}
/* The following gets added to the <head> if it is detected that the user has a
* monospace font with inconsistent normal/bold/italic height. See
* notebookmain.js. Such fonts will have keywords vertically offset with
* respect to the rest of the text. The user should select a better font.
* See: https://github.com/ipython/ipython/issues/1503
*
* .CodeMirror span {
* vertical-align: bottom;
* }
*/
.CodeMirror {
line-height: 1.21429em;
/* Changed from 1em to our global default */
font-size: 14px;
height: auto;
/* Changed to auto to autogrow */
background: none;
/* Changed from white to allow our bg to show through */
}
.CodeMirror-scroll {
  /* The CodeMirror docs are a bit fuzzy on whether overflow-y should be hidden or visible. */
  /* We have found that if it is visible, vertical scrollbars appear with font size changes. */
overflow-y: hidden;
overflow-x: auto;
}
.CodeMirror-lines {
/* In CM2, this used to be 0.4em, but in CM3 it went to 4px. We need the em value because */
/* we have set a different line-height and want this to scale with that. */
padding: 0.4em;
}
.CodeMirror-linenumber {
padding: 0 8px 0 4px;
}
.CodeMirror-gutters {
border-bottom-left-radius: 2px;
border-top-left-radius: 2px;
}
.CodeMirror pre {
/* In CM3 this went to 4px from 0 in CM2. We need the 0 value because of how we size */
/* .CodeMirror-lines */
padding: 0;
border: 0;
border-radius: 0;
}
/*
Original style from softwaremaniacs.org (c) Ivan Sagalaev <[email protected]>
Adapted from GitHub theme
*/
.highlight-base {
color: #000;
}
.highlight-variable {
color: #000;
}
.highlight-variable-2 {
color: #1a1a1a;
}
.highlight-variable-3 {
color: #333333;
}
.highlight-string {
color: #BA2121;
}
.highlight-comment {
color: #408080;
font-style: italic;
}
.highlight-number {
color: #080;
}
.highlight-atom {
color: #88F;
}
.highlight-keyword {
color: #008000;
font-weight: bold;
}
.highlight-builtin {
color: #008000;
}
.highlight-error {
color: #f00;
}
.highlight-operator {
color: #AA22FF;
font-weight: bold;
}
.highlight-meta {
color: #AA22FF;
}
/* previously not defined, copying from default codemirror */
.highlight-def {
color: #00f;
}
.highlight-string-2 {
color: #f50;
}
.highlight-qualifier {
color: #555;
}
.highlight-bracket {
color: #997;
}
.highlight-tag {
color: #170;
}
.highlight-attribute {
color: #00c;
}
.highlight-header {
color: blue;
}
.highlight-quote {
color: #090;
}
.highlight-link {
color: #00c;
}
/* apply the same style to codemirror */
.cm-s-ipython span.cm-keyword {
color: #008000;
font-weight: bold;
}
.cm-s-ipython span.cm-atom {
color: #88F;
}
.cm-s-ipython span.cm-number {
color: #080;
}
.cm-s-ipython span.cm-def {
color: #00f;
}
.cm-s-ipython span.cm-variable {
color: #000;
}
.cm-s-ipython span.cm-operator {
color: #AA22FF;
font-weight: bold;
}
.cm-s-ipython span.cm-variable-2 {
color: #1a1a1a;
}
.cm-s-ipython span.cm-variable-3 {
color: #333333;
}
.cm-s-ipython span.cm-comment {
color: #408080;
font-style: italic;
}
.cm-s-ipython span.cm-string {
color: #BA2121;
}
.cm-s-ipython span.cm-string-2 {
color: #f50;
}
.cm-s-ipython span.cm-meta {
color: #AA22FF;
}
.cm-s-ipython span.cm-qualifier {
color: #555;
}
.cm-s-ipython span.cm-builtin {
color: #008000;
}
.cm-s-ipython span.cm-bracket {
color: #997;
}
.cm-s-ipython span.cm-tag {
color: #170;
}
.cm-s-ipython span.cm-attribute {
color: #00c;
}
.cm-s-ipython span.cm-header {
color: blue;
}
.cm-s-ipython span.cm-quote {
color: #090;
}
.cm-s-ipython span.cm-link {
color: #00c;
}
.cm-s-ipython span.cm-error {
color: #f00;
}
.cm-s-ipython span.cm-tab {
background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAMCAYAAAAkuj5RAAAAAXNSR0IArs4c6QAAAGFJREFUSMft1LsRQFAQheHPowAKoACx3IgEKtaEHujDjORSgWTH/ZOdnZOcM/sgk/kFFWY0qV8foQwS4MKBCS3qR6ixBJvElOobYAtivseIE120FaowJPN75GMu8j/LfMwNjh4HUpwg4LUAAAAASUVORK5CYII=);
background-position: right;
background-repeat: no-repeat;
}
div.output_wrapper {
  /* this position must be relative to enable descendants to be absolute within it */
position: relative;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
z-index: 1;
}
/* class for the output area when it should be height-limited */
div.output_scroll {
/* ideally, this would be max-height, but FF barfs all over that */
height: 24em;
/* FF needs this *and the wrapper* to specify full width, or it will shrinkwrap */
width: 100%;
overflow: auto;
border-radius: 2px;
-webkit-box-shadow: inset 0 2px 8px rgba(0, 0, 0, 0.8);
box-shadow: inset 0 2px 8px rgba(0, 0, 0, 0.8);
display: block;
}
/* output div while it is collapsed */
div.output_collapsed {
margin: 0px;
padding: 0px;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
}
div.out_prompt_overlay {
height: 100%;
padding: 0px 0.4em;
position: absolute;
border-radius: 2px;
}
div.out_prompt_overlay:hover {
/* use inner shadow to get border that is computed the same on WebKit/FF */
-webkit-box-shadow: inset 0 0 1px #000;
box-shadow: inset 0 0 1px #000;
background: rgba(240, 240, 240, 0.5);
}
div.output_prompt {
color: #D84315;
}
/* This class is the outer container of all output sections. */
div.output_area {
padding: 0px;
page-break-inside: avoid;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
}
div.output_area .MathJax_Display {
text-align: left !important;
}
div.output_area .rendered_html table {
margin-left: 0;
margin-right: 0;
}
div.output_area .rendered_html img {
margin-left: 0;
margin-right: 0;
}
div.output_area img,
div.output_area svg {
max-width: 100%;
height: auto;
}
div.output_area img.unconfined,
div.output_area svg.unconfined {
max-width: none;
}
/* This is needed to protect the pre formating from global settings such
as that of bootstrap */
.output {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
}
@media (max-width: 540px) {
div.output_area {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: vertical;
-moz-box-align: stretch;
display: box;
box-orient: vertical;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: column;
align-items: stretch;
}
}
div.output_area pre {
margin: 0;
padding: 0;
border: 0;
vertical-align: baseline;
color: black;
background-color: transparent;
border-radius: 0;
}
/* This class is for the output subarea inside the output_area and after
the prompt div. */
div.output_subarea {
overflow-x: auto;
padding: 0.4em;
/* Old browsers */
-webkit-box-flex: 1;
-moz-box-flex: 1;
box-flex: 1;
/* Modern browsers */
flex: 1;
max-width: calc(100% - 14ex);
}
div.output_scroll div.output_subarea {
overflow-x: visible;
}
/* The rest of the output_* classes are for special styling of the different
output types */
/* all text output has this class: */
div.output_text {
text-align: left;
color: #000;
  /* This has to match that of the CodeMirror class line-height below */
line-height: 1.21429em;
}
/* stdout/stderr are 'text' as well as 'stream', but execute_result/error are *not* streams */
div.output_stderr {
background: #fdd;
/* very light red background for stderr */
}
div.output_latex {
text-align: left;
}
/* Empty output_javascript divs should have no height */
div.output_javascript:empty {
padding: 0;
}
.js-error {
color: darkred;
}
/* raw_input styles */
div.raw_input_container {
line-height: 1.21429em;
padding-top: 5px;
}
pre.raw_input_prompt {
/* nothing needed here. */
}
input.raw_input {
font-family: monospace;
font-size: inherit;
color: inherit;
width: auto;
/* make sure input baseline aligns with prompt */
vertical-align: baseline;
/* padding + margin = 0.5em between prompt and cursor */
padding: 0em 0.25em;
margin: 0em 0.25em;
}
input.raw_input:focus {
box-shadow: none;
}
p.p-space {
margin-bottom: 10px;
}
div.output_unrecognized {
padding: 5px;
font-weight: bold;
color: red;
}
div.output_unrecognized a {
color: inherit;
text-decoration: none;
}
div.output_unrecognized a:hover {
color: inherit;
text-decoration: none;
}
.rendered_html {
color: #000;
/* any extras will just be numbers: */
}
.rendered_html em {
font-style: italic;
}
.rendered_html strong {
font-weight: bold;
}
.rendered_html u {
text-decoration: underline;
}
.rendered_html :link {
text-decoration: underline;
}
.rendered_html :visited {
text-decoration: underline;
}
.rendered_html h1 {
font-size: 185.7%;
margin: 1.08em 0 0 0;
font-weight: bold;
line-height: 1.0;
}
.rendered_html h2 {
font-size: 157.1%;
margin: 1.27em 0 0 0;
font-weight: bold;
line-height: 1.0;
}
.rendered_html h3 {
font-size: 128.6%;
margin: 1.55em 0 0 0;
font-weight: bold;
line-height: 1.0;
}
.rendered_html h4 {
font-size: 100%;
margin: 2em 0 0 0;
font-weight: bold;
line-height: 1.0;
}
.rendered_html h5 {
font-size: 100%;
margin: 2em 0 0 0;
font-weight: bold;
line-height: 1.0;
font-style: italic;
}
.rendered_html h6 {
font-size: 100%;
margin: 2em 0 0 0;
font-weight: bold;
line-height: 1.0;
font-style: italic;
}
.rendered_html h1:first-child {
margin-top: 0.538em;
}
.rendered_html h2:first-child {
margin-top: 0.636em;
}
.rendered_html h3:first-child {
margin-top: 0.777em;
}
.rendered_html h4:first-child {
margin-top: 1em;
}
.rendered_html h5:first-child {
margin-top: 1em;
}
.rendered_html h6:first-child {
margin-top: 1em;
}
.rendered_html ul {
list-style: disc;
margin: 0em 2em;
padding-left: 0px;
}
.rendered_html ul ul {
list-style: square;
margin: 0em 2em;
}
.rendered_html ul ul ul {
list-style: circle;
margin: 0em 2em;
}
.rendered_html ol {
list-style: decimal;
margin: 0em 2em;
padding-left: 0px;
}
.rendered_html ol ol {
list-style: upper-alpha;
margin: 0em 2em;
}
.rendered_html ol ol ol {
list-style: lower-alpha;
margin: 0em 2em;
}
.rendered_html ol ol ol ol {
list-style: lower-roman;
margin: 0em 2em;
}
.rendered_html ol ol ol ol ol {
list-style: decimal;
margin: 0em 2em;
}
.rendered_html * + ul {
margin-top: 1em;
}
.rendered_html * + ol {
margin-top: 1em;
}
.rendered_html hr {
color: black;
background-color: black;
}
.rendered_html pre {
margin: 1em 2em;
}
.rendered_html pre,
.rendered_html code {
border: 0;
background-color: #fff;
color: #000;
font-size: 100%;
padding: 0px;
}
.rendered_html blockquote {
margin: 1em 2em;
}
.rendered_html table {
margin-left: auto;
margin-right: auto;
border: 1px solid black;
border-collapse: collapse;
}
.rendered_html tr,
.rendered_html th,
.rendered_html td {
border: 1px solid black;
border-collapse: collapse;
margin: 1em 2em;
}
.rendered_html td,
.rendered_html th {
text-align: left;
vertical-align: middle;
padding: 4px;
}
.rendered_html th {
font-weight: bold;
}
.rendered_html * + table {
margin-top: 1em;
}
.rendered_html p {
text-align: left;
}
.rendered_html * + p {
margin-top: 1em;
}
.rendered_html img {
display: block;
margin-left: auto;
margin-right: auto;
}
.rendered_html * + img {
margin-top: 1em;
}
.rendered_html img,
.rendered_html svg {
max-width: 100%;
height: auto;
}
.rendered_html img.unconfined,
.rendered_html svg.unconfined {
max-width: none;
}
div.text_cell {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
}
@media (max-width: 540px) {
div.text_cell > div.prompt {
display: none;
}
}
div.text_cell_render {
/*font-family: "Helvetica Neue", Arial, Helvetica, Geneva, sans-serif;*/
outline: none;
resize: none;
width: inherit;
border-style: none;
padding: 0.5em 0.5em 0.5em 0.4em;
color: #000;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
a.anchor-link:link {
text-decoration: none;
padding: 0px 20px;
visibility: hidden;
}
h1:hover .anchor-link,
h2:hover .anchor-link,
h3:hover .anchor-link,
h4:hover .anchor-link,
h5:hover .anchor-link,
h6:hover .anchor-link {
visibility: visible;
}
.text_cell.rendered .input_area {
display: none;
}
.text_cell.rendered .rendered_html {
overflow-x: auto;
overflow-y: hidden;
}
.text_cell.unrendered .text_cell_render {
display: none;
}
.cm-header-1,
.cm-header-2,
.cm-header-3,
.cm-header-4,
.cm-header-5,
.cm-header-6 {
font-weight: bold;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
}
.cm-header-1 {
font-size: 185.7%;
}
.cm-header-2 {
font-size: 157.1%;
}
.cm-header-3 {
font-size: 128.6%;
}
.cm-header-4 {
font-size: 110%;
}
.cm-header-5 {
font-size: 100%;
font-style: italic;
}
.cm-header-6 {
font-size: 100%;
font-style: italic;
}
/*!
*
* IPython notebook webapp
*
*/
@media (max-width: 767px) {
.notebook_app {
padding-left: 0px;
padding-right: 0px;
}
}
#ipython-main-app {
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
height: 100%;
}
div#notebook_panel {
margin: 0px;
padding: 0px;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
height: 100%;
}
div#notebook {
font-size: 14px;
line-height: 20px;
overflow-y: hidden;
overflow-x: auto;
width: 100%;
/* This spaces the page away from the edge of the notebook area */
padding-top: 20px;
margin: 0px;
outline: none;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
min-height: 100%;
}
@media not print {
#notebook-container {
padding: 15px;
background-color: #fff;
min-height: 0;
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
}
}
@media print {
#notebook-container {
width: 100%;
}
}
div.ui-widget-content {
border: 1px solid #ababab;
outline: none;
}
pre.dialog {
background-color: #f7f7f7;
border: 1px solid #ddd;
border-radius: 2px;
padding: 0.4em;
padding-left: 2em;
}
p.dialog {
padding: 0.2em;
}
/* Word-wrap output correctly. This is the CSS3 spelling, though Firefox seems
to not honor it correctly. Webkit browsers (Chrome, rekonq, Safari) do.
*/
pre,
code,
kbd,
samp {
white-space: pre-wrap;
}
#fonttest {
font-family: monospace;
}
p {
margin-bottom: 0;
}
.end_space {
min-height: 100px;
transition: height .2s ease;
}
.notebook_app > #header {
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
}
@media not print {
.notebook_app {
background-color: #EEE;
}
}
kbd {
border-style: solid;
border-width: 1px;
box-shadow: none;
margin: 2px;
padding-left: 2px;
padding-right: 2px;
padding-top: 1px;
padding-bottom: 1px;
}
/* CSS for the cell toolbar */
.celltoolbar {
border: thin solid #CFCFCF;
border-bottom: none;
background: #EEE;
border-radius: 2px 2px 0px 0px;
width: 100%;
height: 29px;
padding-right: 4px;
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
/* Old browsers */
-webkit-box-pack: end;
-moz-box-pack: end;
box-pack: end;
/* Modern browsers */
justify-content: flex-end;
display: -webkit-flex;
}
@media print {
.celltoolbar {
display: none;
}
}
.ctb_hideshow {
display: none;
vertical-align: bottom;
}
/* ctb_show is added to the ctb_hideshow div to show the cell toolbar.
Cell toolbars are only shown when the ctb_global_show class is also set.
*/
.ctb_global_show .ctb_show.ctb_hideshow {
display: block;
}
.ctb_global_show .ctb_show + .input_area,
.ctb_global_show .ctb_show + div.text_cell_input,
.ctb_global_show .ctb_show ~ div.text_cell_render {
border-top-right-radius: 0px;
border-top-left-radius: 0px;
}
.ctb_global_show .ctb_show ~ div.text_cell_render {
border: 1px solid #cfcfcf;
}
.celltoolbar {
font-size: 87%;
padding-top: 3px;
}
.celltoolbar select {
display: block;
width: 100%;
height: 32px;
padding: 6px 12px;
font-size: 13px;
line-height: 1.42857143;
color: #555555;
background-color: #fff;
background-image: none;
border: 1px solid #ccc;
border-radius: 2px;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-webkit-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
-o-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
height: 30px;
padding: 5px 10px;
font-size: 12px;
line-height: 1.5;
border-radius: 1px;
width: inherit;
font-size: inherit;
height: 22px;
padding: 0px;
display: inline-block;
}
.celltoolbar select:focus {
border-color: #66afe9;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
}
.celltoolbar select::-moz-placeholder {
color: #999;
opacity: 1;
}
.celltoolbar select:-ms-input-placeholder {
color: #999;
}
.celltoolbar select::-webkit-input-placeholder {
color: #999;
}
.celltoolbar select::-ms-expand {
border: 0;
background-color: transparent;
}
.celltoolbar select[disabled],
.celltoolbar select[readonly],
fieldset[disabled] .celltoolbar select {
background-color: #eeeeee;
opacity: 1;
}
.celltoolbar select[disabled],
fieldset[disabled] .celltoolbar select {
cursor: not-allowed;
}
textarea.celltoolbar select {
height: auto;
}
select.celltoolbar select {
height: 30px;
line-height: 30px;
}
textarea.celltoolbar select,
select[multiple].celltoolbar select {
height: auto;
}
.celltoolbar label {
margin-left: 5px;
margin-right: 5px;
}
.completions {
position: absolute;
z-index: 110;
overflow: hidden;
border: 1px solid #ababab;
border-radius: 2px;
-webkit-box-shadow: 0px 6px 10px -1px #adadad;
box-shadow: 0px 6px 10px -1px #adadad;
line-height: 1;
}
.completions select {
background: white;
outline: none;
border: none;
padding: 0px;
margin: 0px;
overflow: auto;
font-family: monospace;
font-size: 110%;
color: #000;
width: auto;
}
.completions select option.context {
color: #286090;
}
#kernel_logo_widget {
float: right !important;
float: right;
}
#kernel_logo_widget .current_kernel_logo {
display: none;
margin-top: -1px;
margin-bottom: -1px;
width: 32px;
height: 32px;
}
#menubar {
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
margin-top: 1px;
}
#menubar .navbar {
border-top: 1px;
border-radius: 0px 0px 2px 2px;
margin-bottom: 0px;
}
#menubar .navbar-toggle {
float: left;
padding-top: 7px;
padding-bottom: 7px;
border: none;
}
#menubar .navbar-collapse {
clear: left;
}
.nav-wrapper {
border-bottom: 1px solid #e7e7e7;
}
i.menu-icon {
padding-top: 4px;
}
ul#help_menu li a {
overflow: hidden;
padding-right: 2.2em;
}
ul#help_menu li a i {
margin-right: -1.2em;
}
.dropdown-submenu {
position: relative;
}
.dropdown-submenu > .dropdown-menu {
top: 0;
left: 100%;
margin-top: -6px;
margin-left: -1px;
}
.dropdown-submenu:hover > .dropdown-menu {
display: block;
}
.dropdown-submenu > a:after {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
display: block;
content: "\f0da";
float: right;
color: #333333;
margin-top: 2px;
margin-right: -10px;
}
.dropdown-submenu > a:after.pull-left {
margin-right: .3em;
}
.dropdown-submenu > a:after.pull-right {
margin-left: .3em;
}
.dropdown-submenu:hover > a:after {
color: #262626;
}
.dropdown-submenu.pull-left {
float: none;
}
.dropdown-submenu.pull-left > .dropdown-menu {
left: -100%;
margin-left: 10px;
}
#notification_area {
float: right !important;
float: right;
z-index: 10;
}
.indicator_area {
float: right !important;
float: right;
color: #777;
margin-left: 5px;
margin-right: 5px;
width: 11px;
z-index: 10;
text-align: center;
width: auto;
}
#kernel_indicator {
float: right !important;
float: right;
color: #777;
margin-left: 5px;
margin-right: 5px;
width: 11px;
z-index: 10;
text-align: center;
width: auto;
border-left: 1px solid;
}
#kernel_indicator .kernel_indicator_name {
padding-left: 5px;
padding-right: 5px;
}
#modal_indicator {
float: right !important;
float: right;
color: #777;
margin-left: 5px;
margin-right: 5px;
width: 11px;
z-index: 10;
text-align: center;
width: auto;
}
#readonly-indicator {
float: right !important;
float: right;
color: #777;
margin-left: 5px;
margin-right: 5px;
width: 11px;
z-index: 10;
text-align: center;
width: auto;
margin-top: 2px;
margin-bottom: 0px;
margin-left: 0px;
margin-right: 0px;
display: none;
}
.modal_indicator:before {
width: 1.28571429em;
text-align: center;
}
.edit_mode .modal_indicator:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f040";
}
.edit_mode .modal_indicator:before.pull-left {
margin-right: .3em;
}
.edit_mode .modal_indicator:before.pull-right {
margin-left: .3em;
}
.command_mode .modal_indicator:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: ' ';
}
.command_mode .modal_indicator:before.pull-left {
margin-right: .3em;
}
.command_mode .modal_indicator:before.pull-right {
margin-left: .3em;
}
.kernel_idle_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f10c";
}
.kernel_idle_icon:before.pull-left {
margin-right: .3em;
}
.kernel_idle_icon:before.pull-right {
margin-left: .3em;
}
.kernel_busy_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f111";
}
.kernel_busy_icon:before.pull-left {
margin-right: .3em;
}
.kernel_busy_icon:before.pull-right {
margin-left: .3em;
}
.kernel_dead_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f1e2";
}
.kernel_dead_icon:before.pull-left {
margin-right: .3em;
}
.kernel_dead_icon:before.pull-right {
margin-left: .3em;
}
.kernel_disconnected_icon:before {
display: inline-block;
font: normal normal normal 14px/1 FontAwesome;
font-size: inherit;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
content: "\f127";
}
.kernel_disconnected_icon:before.pull-left {
margin-right: .3em;
}
.kernel_disconnected_icon:before.pull-right {
margin-left: .3em;
}
.notification_widget {
color: #777;
z-index: 10;
background: rgba(240, 240, 240, 0.5);
margin-right: 4px;
color: #333;
background-color: #fff;
border-color: #ccc;
}
.notification_widget:focus,
.notification_widget.focus {
color: #333;
background-color: #e6e6e6;
border-color: #8c8c8c;
}
.notification_widget:hover {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
.notification_widget:active,
.notification_widget.active,
.open > .dropdown-toggle.notification_widget {
color: #333;
background-color: #e6e6e6;
border-color: #adadad;
}
.notification_widget:active:hover,
.notification_widget.active:hover,
.open > .dropdown-toggle.notification_widget:hover,
.notification_widget:active:focus,
.notification_widget.active:focus,
.open > .dropdown-toggle.notification_widget:focus,
.notification_widget:active.focus,
.notification_widget.active.focus,
.open > .dropdown-toggle.notification_widget.focus {
color: #333;
background-color: #d4d4d4;
border-color: #8c8c8c;
}
.notification_widget:active,
.notification_widget.active,
.open > .dropdown-toggle.notification_widget {
background-image: none;
}
.notification_widget.disabled:hover,
.notification_widget[disabled]:hover,
fieldset[disabled] .notification_widget:hover,
.notification_widget.disabled:focus,
.notification_widget[disabled]:focus,
fieldset[disabled] .notification_widget:focus,
.notification_widget.disabled.focus,
.notification_widget[disabled].focus,
fieldset[disabled] .notification_widget.focus {
background-color: #fff;
border-color: #ccc;
}
.notification_widget .badge {
color: #fff;
background-color: #333;
}
.notification_widget.warning {
color: #fff;
background-color: #f0ad4e;
border-color: #eea236;
}
.notification_widget.warning:focus,
.notification_widget.warning.focus {
color: #fff;
background-color: #ec971f;
border-color: #985f0d;
}
.notification_widget.warning:hover {
color: #fff;
background-color: #ec971f;
border-color: #d58512;
}
.notification_widget.warning:active,
.notification_widget.warning.active,
.open > .dropdown-toggle.notification_widget.warning {
color: #fff;
background-color: #ec971f;
border-color: #d58512;
}
.notification_widget.warning:active:hover,
.notification_widget.warning.active:hover,
.open > .dropdown-toggle.notification_widget.warning:hover,
.notification_widget.warning:active:focus,
.notification_widget.warning.active:focus,
.open > .dropdown-toggle.notification_widget.warning:focus,
.notification_widget.warning:active.focus,
.notification_widget.warning.active.focus,
.open > .dropdown-toggle.notification_widget.warning.focus {
color: #fff;
background-color: #d58512;
border-color: #985f0d;
}
.notification_widget.warning:active,
.notification_widget.warning.active,
.open > .dropdown-toggle.notification_widget.warning {
background-image: none;
}
.notification_widget.warning.disabled:hover,
.notification_widget.warning[disabled]:hover,
fieldset[disabled] .notification_widget.warning:hover,
.notification_widget.warning.disabled:focus,
.notification_widget.warning[disabled]:focus,
fieldset[disabled] .notification_widget.warning:focus,
.notification_widget.warning.disabled.focus,
.notification_widget.warning[disabled].focus,
fieldset[disabled] .notification_widget.warning.focus {
background-color: #f0ad4e;
border-color: #eea236;
}
.notification_widget.warning .badge {
color: #f0ad4e;
background-color: #fff;
}
.notification_widget.success {
color: #fff;
background-color: #5cb85c;
border-color: #4cae4c;
}
.notification_widget.success:focus,
.notification_widget.success.focus {
color: #fff;
background-color: #449d44;
border-color: #255625;
}
.notification_widget.success:hover {
color: #fff;
background-color: #449d44;
border-color: #398439;
}
.notification_widget.success:active,
.notification_widget.success.active,
.open > .dropdown-toggle.notification_widget.success {
color: #fff;
background-color: #449d44;
border-color: #398439;
}
.notification_widget.success:active:hover,
.notification_widget.success.active:hover,
.open > .dropdown-toggle.notification_widget.success:hover,
.notification_widget.success:active:focus,
.notification_widget.success.active:focus,
.open > .dropdown-toggle.notification_widget.success:focus,
.notification_widget.success:active.focus,
.notification_widget.success.active.focus,
.open > .dropdown-toggle.notification_widget.success.focus {
color: #fff;
background-color: #398439;
border-color: #255625;
}
.notification_widget.success:active,
.notification_widget.success.active,
.open > .dropdown-toggle.notification_widget.success {
background-image: none;
}
.notification_widget.success.disabled:hover,
.notification_widget.success[disabled]:hover,
fieldset[disabled] .notification_widget.success:hover,
.notification_widget.success.disabled:focus,
.notification_widget.success[disabled]:focus,
fieldset[disabled] .notification_widget.success:focus,
.notification_widget.success.disabled.focus,
.notification_widget.success[disabled].focus,
fieldset[disabled] .notification_widget.success.focus {
background-color: #5cb85c;
border-color: #4cae4c;
}
.notification_widget.success .badge {
color: #5cb85c;
background-color: #fff;
}
.notification_widget.info {
color: #fff;
background-color: #5bc0de;
border-color: #46b8da;
}
.notification_widget.info:focus,
.notification_widget.info.focus {
color: #fff;
background-color: #31b0d5;
border-color: #1b6d85;
}
.notification_widget.info:hover {
color: #fff;
background-color: #31b0d5;
border-color: #269abc;
}
.notification_widget.info:active,
.notification_widget.info.active,
.open > .dropdown-toggle.notification_widget.info {
color: #fff;
background-color: #31b0d5;
border-color: #269abc;
}
.notification_widget.info:active:hover,
.notification_widget.info.active:hover,
.open > .dropdown-toggle.notification_widget.info:hover,
.notification_widget.info:active:focus,
.notification_widget.info.active:focus,
.open > .dropdown-toggle.notification_widget.info:focus,
.notification_widget.info:active.focus,
.notification_widget.info.active.focus,
.open > .dropdown-toggle.notification_widget.info.focus {
color: #fff;
background-color: #269abc;
border-color: #1b6d85;
}
.notification_widget.info:active,
.notification_widget.info.active,
.open > .dropdown-toggle.notification_widget.info {
background-image: none;
}
.notification_widget.info.disabled:hover,
.notification_widget.info[disabled]:hover,
fieldset[disabled] .notification_widget.info:hover,
.notification_widget.info.disabled:focus,
.notification_widget.info[disabled]:focus,
fieldset[disabled] .notification_widget.info:focus,
.notification_widget.info.disabled.focus,
.notification_widget.info[disabled].focus,
fieldset[disabled] .notification_widget.info.focus {
background-color: #5bc0de;
border-color: #46b8da;
}
.notification_widget.info .badge {
color: #5bc0de;
background-color: #fff;
}
.notification_widget.danger {
color: #fff;
background-color: #d9534f;
border-color: #d43f3a;
}
.notification_widget.danger:focus,
.notification_widget.danger.focus {
color: #fff;
background-color: #c9302c;
border-color: #761c19;
}
.notification_widget.danger:hover {
color: #fff;
background-color: #c9302c;
border-color: #ac2925;
}
.notification_widget.danger:active,
.notification_widget.danger.active,
.open > .dropdown-toggle.notification_widget.danger {
color: #fff;
background-color: #c9302c;
border-color: #ac2925;
}
.notification_widget.danger:active:hover,
.notification_widget.danger.active:hover,
.open > .dropdown-toggle.notification_widget.danger:hover,
.notification_widget.danger:active:focus,
.notification_widget.danger.active:focus,
.open > .dropdown-toggle.notification_widget.danger:focus,
.notification_widget.danger:active.focus,
.notification_widget.danger.active.focus,
.open > .dropdown-toggle.notification_widget.danger.focus {
color: #fff;
background-color: #ac2925;
border-color: #761c19;
}
.notification_widget.danger:active,
.notification_widget.danger.active,
.open > .dropdown-toggle.notification_widget.danger {
background-image: none;
}
.notification_widget.danger.disabled:hover,
.notification_widget.danger[disabled]:hover,
fieldset[disabled] .notification_widget.danger:hover,
.notification_widget.danger.disabled:focus,
.notification_widget.danger[disabled]:focus,
fieldset[disabled] .notification_widget.danger:focus,
.notification_widget.danger.disabled.focus,
.notification_widget.danger[disabled].focus,
fieldset[disabled] .notification_widget.danger.focus {
background-color: #d9534f;
border-color: #d43f3a;
}
.notification_widget.danger .badge {
color: #d9534f;
background-color: #fff;
}
div#pager {
background-color: #fff;
font-size: 14px;
line-height: 20px;
overflow: hidden;
display: none;
position: fixed;
bottom: 0px;
width: 100%;
max-height: 50%;
padding-top: 8px;
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
/* Display over codemirror */
z-index: 100;
/* Hack which prevents jquery ui resizable from changing top. */
top: auto !important;
}
div#pager pre {
line-height: 1.21429em;
color: #000;
background-color: #f7f7f7;
padding: 0.4em;
}
div#pager #pager-button-area {
position: absolute;
top: 8px;
right: 20px;
}
div#pager #pager-contents {
position: relative;
overflow: auto;
width: 100%;
height: 100%;
}
div#pager #pager-contents #pager-container {
position: relative;
padding: 15px 0px;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
div#pager .ui-resizable-handle {
top: 0px;
height: 8px;
background: #f7f7f7;
border-top: 1px solid #cfcfcf;
border-bottom: 1px solid #cfcfcf;
/* This injects handle bars (a short, wide = symbol) for
the resize handle. */
}
div#pager .ui-resizable-handle::after {
content: '';
top: 2px;
left: 50%;
height: 3px;
width: 30px;
margin-left: -15px;
position: absolute;
border-top: 1px solid #cfcfcf;
}
.quickhelp {
/* Old browsers */
display: -webkit-box;
-webkit-box-orient: horizontal;
-webkit-box-align: stretch;
display: -moz-box;
-moz-box-orient: horizontal;
-moz-box-align: stretch;
display: box;
box-orient: horizontal;
box-align: stretch;
/* Modern browsers */
display: flex;
flex-direction: row;
align-items: stretch;
line-height: 1.8em;
}
.shortcut_key {
display: inline-block;
width: 21ex;
text-align: right;
font-family: monospace;
}
.shortcut_descr {
display: inline-block;
/* Old browsers */
-webkit-box-flex: 1;
-moz-box-flex: 1;
box-flex: 1;
/* Modern browsers */
flex: 1;
}
span.save_widget {
margin-top: 6px;
}
span.save_widget span.filename {
height: 1em;
line-height: 1em;
padding: 3px;
margin-left: 16px;
border: none;
font-size: 146.5%;
border-radius: 2px;
}
span.save_widget span.filename:hover {
background-color: #e6e6e6;
}
span.checkpoint_status,
span.autosave_status {
font-size: small;
}
@media (max-width: 767px) {
span.save_widget {
font-size: small;
}
span.checkpoint_status,
span.autosave_status {
display: none;
}
}
@media (min-width: 768px) and (max-width: 991px) {
span.checkpoint_status {
display: none;
}
span.autosave_status {
font-size: x-small;
}
}
.toolbar {
padding: 0px;
margin-left: -5px;
margin-top: 2px;
margin-bottom: 5px;
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
.toolbar select,
.toolbar label {
width: auto;
vertical-align: middle;
margin-right: 2px;
margin-bottom: 0px;
display: inline;
font-size: 92%;
margin-left: 0.3em;
margin-right: 0.3em;
padding: 0px;
padding-top: 3px;
}
.toolbar .btn {
padding: 2px 8px;
}
.toolbar .btn-group {
margin-top: 0px;
margin-left: 5px;
}
#maintoolbar {
margin-bottom: -3px;
margin-top: -8px;
border: 0px;
min-height: 27px;
margin-left: 0px;
padding-top: 11px;
padding-bottom: 3px;
}
#maintoolbar .navbar-text {
float: none;
vertical-align: middle;
text-align: right;
margin-left: 5px;
margin-right: 0px;
margin-top: 0px;
}
.select-xs {
height: 24px;
}
.pulse,
.dropdown-menu > li > a.pulse,
li.pulse > a.dropdown-toggle,
li.pulse.open > a.dropdown-toggle {
background-color: #F37626;
color: white;
}
/**
* Primary styles
*
* Author: Jupyter Development Team
*/
/** WARNING IF YOU ARE EDITING THIS FILE: if this is a .css file, it has a good
 * chance of being generated from the ../less/[samename].less file; you can
 * try to get back the less file by reverting some commit in history
 **/
/*
* We'll try to get something pretty, so we
* have some strange css to have the scroll bar on
 * the left with a fixed button on the top right of the tooltip
*/
@-moz-keyframes fadeOut {
from {
opacity: 1;
}
to {
opacity: 0;
}
}
@-webkit-keyframes fadeOut {
from {
opacity: 1;
}
to {
opacity: 0;
}
}
@-moz-keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
@-webkit-keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
/*properties of tooltip after "expand"*/
.bigtooltip {
overflow: auto;
height: 200px;
-webkit-transition-property: height;
-webkit-transition-duration: 500ms;
-moz-transition-property: height;
-moz-transition-duration: 500ms;
transition-property: height;
transition-duration: 500ms;
}
/*properties of tooltip before "expand"*/
.smalltooltip {
-webkit-transition-property: height;
-webkit-transition-duration: 500ms;
-moz-transition-property: height;
-moz-transition-duration: 500ms;
transition-property: height;
transition-duration: 500ms;
text-overflow: ellipsis;
overflow: hidden;
height: 80px;
}
.tooltipbuttons {
position: absolute;
padding-right: 15px;
top: 0px;
right: 0px;
}
.tooltiptext {
  /* keep the buttons from overlapping the docstring */
padding-right: 30px;
}
.ipython_tooltip {
max-width: 700px;
/*fade-in animation when inserted*/
-webkit-animation: fadeOut 400ms;
-moz-animation: fadeOut 400ms;
animation: fadeOut 400ms;
-webkit-animation: fadeIn 400ms;
-moz-animation: fadeIn 400ms;
animation: fadeIn 400ms;
vertical-align: middle;
background-color: #f7f7f7;
overflow: visible;
border: #ababab 1px solid;
outline: none;
padding: 3px;
margin: 0px;
padding-left: 7px;
font-family: monospace;
min-height: 50px;
-moz-box-shadow: 0px 6px 10px -1px #adadad;
-webkit-box-shadow: 0px 6px 10px -1px #adadad;
box-shadow: 0px 6px 10px -1px #adadad;
border-radius: 2px;
position: absolute;
z-index: 1000;
}
.ipython_tooltip a {
float: right;
}
.ipython_tooltip .tooltiptext pre {
border: 0;
border-radius: 0;
font-size: 100%;
background-color: #f7f7f7;
}
.pretooltiparrow {
left: 0px;
margin: 0px;
top: -16px;
width: 40px;
height: 16px;
overflow: hidden;
position: absolute;
}
.pretooltiparrow:before {
background-color: #f7f7f7;
border: 1px #ababab solid;
z-index: 11;
content: "";
position: absolute;
left: 15px;
top: 10px;
width: 25px;
height: 25px;
-webkit-transform: rotate(45deg);
-moz-transform: rotate(45deg);
-ms-transform: rotate(45deg);
-o-transform: rotate(45deg);
}
ul.typeahead-list i {
margin-left: -10px;
width: 18px;
}
ul.typeahead-list {
max-height: 80vh;
overflow: auto;
}
ul.typeahead-list > li > a {
/** Firefox bug **/
/* see https://github.com/jupyter/notebook/issues/559 */
white-space: normal;
}
.cmd-palette .modal-body {
padding: 7px;
}
.cmd-palette form {
background: white;
}
.cmd-palette input {
outline: none;
}
.no-shortcut {
display: none;
}
.command-shortcut:before {
content: "(command)";
padding-right: 3px;
color: #777777;
}
.edit-shortcut:before {
content: "(edit)";
padding-right: 3px;
color: #777777;
}
#find-and-replace #replace-preview .match,
#find-and-replace #replace-preview .insert {
background-color: #BBDEFB;
border-color: #90CAF9;
border-style: solid;
border-width: 1px;
border-radius: 0px;
}
#find-and-replace #replace-preview .replace .match {
background-color: #FFCDD2;
border-color: #EF9A9A;
border-radius: 0px;
}
#find-and-replace #replace-preview .replace .insert {
background-color: #C8E6C9;
border-color: #A5D6A7;
border-radius: 0px;
}
#find-and-replace #replace-preview {
max-height: 60vh;
overflow: auto;
}
#find-and-replace #replace-preview pre {
padding: 5px 10px;
}
.terminal-app {
background: #EEE;
}
.terminal-app #header {
background: #fff;
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.2);
}
.terminal-app .terminal {
width: 100%;
float: left;
font-family: monospace;
color: white;
background: black;
padding: 0.4em;
border-radius: 2px;
-webkit-box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.4);
box-shadow: 0px 0px 12px 1px rgba(87, 87, 87, 0.4);
}
.terminal-app .terminal,
.terminal-app .terminal dummy-screen {
line-height: 1em;
font-size: 14px;
}
.terminal-app .terminal .xterm-rows {
padding: 10px;
}
.terminal-app .terminal-cursor {
color: black;
background: white;
}
.terminal-app #terminado-container {
margin-top: 20px;
}
</style>
<style type="text/css">
.highlight .hll { background-color: #ffffcc }
.highlight { background: #f8f8f8; }
.highlight .c { color: #408080; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #FF0000 } /* Error */
.highlight .k { color: #008000; font-weight: bold } /* Keyword */
.highlight .o { color: #666666 } /* Operator */
.highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #BC7A00 } /* Comment.Preproc */
.highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */
.highlight .cs { color: #408080; font-style: italic } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .gr { color: #FF0000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #00A000 } /* Generic.Inserted */
.highlight .go { color: #888888 } /* Generic.Output */
.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #0044DD } /* Generic.Traceback */
.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #008000 } /* Keyword.Pseudo */
.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #B00040 } /* Keyword.Type */
.highlight .m { color: #666666 } /* Literal.Number */
.highlight .s { color: #BA2121 } /* Literal.String */
.highlight .na { color: #7D9029 } /* Name.Attribute */
.highlight .nb { color: #008000 } /* Name.Builtin */
.highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */
.highlight .no { color: #880000 } /* Name.Constant */
.highlight .nd { color: #AA22FF } /* Name.Decorator */
.highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
.highlight .nf { color: #0000FF } /* Name.Function */
.highlight .nl { color: #A0A000 } /* Name.Label */
.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #19177C } /* Name.Variable */
.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
.highlight .w { color: #bbbbbb } /* Text.Whitespace */
.highlight .mb { color: #666666 } /* Literal.Number.Bin */
.highlight .mf { color: #666666 } /* Literal.Number.Float */
.highlight .mh { color: #666666 } /* Literal.Number.Hex */
.highlight .mi { color: #666666 } /* Literal.Number.Integer */
.highlight .mo { color: #666666 } /* Literal.Number.Oct */
.highlight .sa { color: #BA2121 } /* Literal.String.Affix */
.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */
.highlight .sc { color: #BA2121 } /* Literal.String.Char */
.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */
.highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #BA2121 } /* Literal.String.Double */
.highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */
.highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
.highlight .sx { color: #008000 } /* Literal.String.Other */
.highlight .sr { color: #BB6688 } /* Literal.String.Regex */
.highlight .s1 { color: #BA2121 } /* Literal.String.Single */
.highlight .ss { color: #19177C } /* Literal.String.Symbol */
.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */
.highlight .fm { color: #0000FF } /* Name.Function.Magic */
.highlight .vc { color: #19177C } /* Name.Variable.Class */
.highlight .vg { color: #19177C } /* Name.Variable.Global */
.highlight .vi { color: #19177C } /* Name.Variable.Instance */
.highlight .vm { color: #19177C } /* Name.Variable.Magic */
.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */
</style>
<style type="text/css">
/* Temporary definitions which will become obsolete with Notebook release 5.0 */
.ansi-black-fg { color: #3E424D; }
.ansi-black-bg { background-color: #3E424D; }
.ansi-black-intense-fg { color: #282C36; }
.ansi-black-intense-bg { background-color: #282C36; }
.ansi-red-fg { color: #E75C58; }
.ansi-red-bg { background-color: #E75C58; }
.ansi-red-intense-fg { color: #B22B31; }
.ansi-red-intense-bg { background-color: #B22B31; }
.ansi-green-fg { color: #00A250; }
.ansi-green-bg { background-color: #00A250; }
.ansi-green-intense-fg { color: #007427; }
.ansi-green-intense-bg { background-color: #007427; }
.ansi-yellow-fg { color: #DDB62B; }
.ansi-yellow-bg { background-color: #DDB62B; }
.ansi-yellow-intense-fg { color: #B27D12; }
.ansi-yellow-intense-bg { background-color: #B27D12; }
.ansi-blue-fg { color: #208FFB; }
.ansi-blue-bg { background-color: #208FFB; }
.ansi-blue-intense-fg { color: #0065CA; }
.ansi-blue-intense-bg { background-color: #0065CA; }
.ansi-magenta-fg { color: #D160C4; }
.ansi-magenta-bg { background-color: #D160C4; }
.ansi-magenta-intense-fg { color: #A03196; }
.ansi-magenta-intense-bg { background-color: #A03196; }
.ansi-cyan-fg { color: #60C6C8; }
.ansi-cyan-bg { background-color: #60C6C8; }
.ansi-cyan-intense-fg { color: #258F8F; }
.ansi-cyan-intense-bg { background-color: #258F8F; }
.ansi-white-fg { color: #C5C1B4; }
.ansi-white-bg { background-color: #C5C1B4; }
.ansi-white-intense-fg { color: #A1A6B2; }
.ansi-white-intense-bg { background-color: #A1A6B2; }
.ansi-bold { font-weight: bold; }
</style>
<style type="text/css">
/* Overrides of notebook CSS for static HTML export */
body {
overflow: visible;
padding: 8px;
}
div#notebook {
overflow: visible;
border-top: none;
}@media print {
div.cell {
display: block;
page-break-inside: avoid;
}
div.output_wrapper {
display: block;
page-break-inside: avoid;
}
div.output {
display: block;
page-break-inside: avoid;
}
}
</style>
<!-- Custom stylesheet, it must be in the same directory as the html file -->
<link rel="stylesheet" href="custom.css">
<!-- Loading mathjax macro -->
<!-- Load mathjax -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS_HTML"></script>
<!-- MathJax configuration -->
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
inlineMath: [ ['$','$'], ["\\(","\\)"] ],
displayMath: [ ['$$','$$'], ["\\[","\\]"] ],
processEscapes: true,
processEnvironments: true
},
// Center justify equations in code and markdown cells. Elsewhere
// we use CSS to left justify single line equations in code cells.
displayAlign: 'center',
"HTML-CSS": {
styles: {'.MathJax_Display': {"margin": 0}},
linebreaks: { automatic: true }
}
});
</script>
<!-- End of mathjax configuration --></head>
<body>
<div tabindex="-1" id="notebook" class="border-box-sizing">
<div class="container" id="notebook-container">
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<h1 id="🚀-100-Times-Faster-Natural-Language-Processing-in Python">🚀 100 Times Faster Natural Language Processing in Python<a class="anchor-link" href="#🚀-100-Times-Faster-Natural-Language-Processing-in Python">¶</a></h1>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>This IPython notebook contains the examples detailed in our post "🚀 100 Times Faster Natural Language Processing in Python".</p>
<p>To run the notebook, you will first need to do the following (a quick sanity check is sketched after the list):</p>
<ul>
<li><a href="http://cython.readthedocs.io/en/latest/src/quickstart/install.html">install Cython</a>, e.g. <code>pip install cython</code></li>
<li><a href="https://spacy.io/usage/">install spaCy</a>, e.g. <code>pip install spacy</code></li>
<li><a href="https://spacy.io/usage/models">download a language model for spaCy</a>, e.g. <code>python -m spacy download en</code></li>
</ul>
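<p>As a quick way to verify the setup, here is a minimal sketch (not part of the original post); it assumes the <code>en</code> shortcut link created by the download command above:</p>
<pre><code># Minimal sanity check for the setup above (assumes the 'en' shortcut model).
import cython
import spacy

print(cython.__version__)                      # Cython is importable
nlp = spacy.load('en')                         # the language model downloaded above
print([t.text for t in nlp('Hello world!')])   # spaCy can tokenize text
</code></pre>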
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>Cython then has to be activated in the notebook as follows:</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [5]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="o">%</span><span class="k">load_ext</span> Cython
</pre></div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<h1 id="Fast-loops-in-Python-with-a-bit-of Cython">Fast loops in Python with a bit of Cython<a class="anchor-link" href="#Fast-loops-in-Python-with-a-bit-of Cython">¶</a></h1>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p><img src="https://cdn-images-1.medium.com/max/800/0*RA89oQ-0j3Rscipw.jpg" alt="Rectangles" title="Rectangles"></p>
<p>In this simple example we have a large set of rectangles that we store as a list of Python objects, e.g. instances of a Rectangle class. The main job of our module is to iterate over this list in order to count how many rectangles have an area larger than a specific threshold.</p>
<p>Our Python module is quite simple and looks like this (see also here: <a href="https://gist.github.com/thomwolf/0709b5a72cf3620cd00d94791213d38e">https://gist.github.com/thomwolf/0709b5a72cf3620cd00d94791213d38e</a>):</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [40]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="kn">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span>
<span class="k">class</span> <span class="nc">Rectangle</span><span class="p">:</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">w</span><span class="p">,</span> <span class="n">h</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">w</span>
<span class="bp">self</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">h</span>
<span class="k">def</span> <span class="nf">area</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">h</span>
<span class="k">def</span> <span class="nf">check_rectangles_py</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">):</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">:</span>
<span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">area</span><span class="p">()</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">def</span> <span class="nf">main_rectangles_slow</span><span class="p">():</span>
<span class="n">n_rectangles</span> <span class="o">=</span> <span class="mi">10000000</span>
<span class="n">rectangles</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">(</span><span class="n">random</span><span class="p">(),</span> <span class="n">random</span><span class="p">())</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">))</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_py</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="o">=</span><span class="mf">0.25</span><span class="p">)</span>
<span class="nb">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [41]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="c1"># Let's run it:</span>
<span class="n">main_rectangles_slow</span><span class="p">()</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt"></div>
<div class="output_subarea output_stream output_stdout output_text">
<pre>4034722
</pre>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>The <code>check_rectangles_py</code> function, which loops over a large number of Python objects, is our bottleneck!</p>
<p>Let's write it in Cython.</p>
<p>We mark the cell as a Cython cell with the <code>%%cython</code> magic command. When the cell is run, the Cython code is written to a temporary file, compiled, and imported back into the IPython namespace. The Cython code therefore has to be self-contained: note how the cell below does its own imports.</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [44]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="o">%%</span><span class="n">cython</span>
<span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span>
<span class="k">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span>
<span class="k">cdef</span> <span class="k">struct</span> <span class="nf">Rectangle</span><span class="p">:</span>
<span class="nb">float</span> <span class="n">w</span>
<span class="nb">float</span> <span class="n">h</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">check_rectangles_cy</span><span class="p">(</span><span class="n">Rectangle</span><span class="o">*</span> <span class="n">rectangles</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="nb">float</span> <span class="n">threshold</span><span class="p">):</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_out</span> <span class="o">=</span> <span class="mf">0</span>
<span class="c"># C arrays contain no size information => we need to state it explicitly</span>
<span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">[:</span><span class="n">n_rectangles</span><span class="p">]:</span>
<span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">h</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">cpdef</span> <span class="nf">main_rectangles_fast</span><span class="p">():</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_rectangles</span> <span class="o">=</span> <span class="mf">10000000</span>
<span class="k">cdef</span> <span class="kt">float</span> <span class="nf">threshold</span> <span class="o">=</span> <span class="mf">0.25</span>
<span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span>
<span class="k">cdef</span> <span class="kt">Rectangle</span>* <span class="nf">rectangles</span> <span class="o">=</span> <span class="o"><</span><span class="n">Rectangle</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">))</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">):</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_cy</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">)</span>
<span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
</div>
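<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>A quick note on memory management: the <code>Pool</code> object from <code>cymem</code> owns every buffer created through its <code>alloc</code> method and releases them all when the <code>Pool</code> itself is garbage-collected, so no explicit <code>free()</code> is needed. A minimal sketch (the <code>Point</code> struct is just an illustration, not part of this notebook):</p>
<pre><code>%%cython
from cymem.cymem cimport Pool

cdef struct Point:
    float x
    float y

cpdef demo_pool():
    cdef Pool mem = Pool()
    # the Pool records this allocation and will free it for us
    cdef Point* pts = <Point*>mem.alloc(10, sizeof(Point))
    pts[0].x = 1.0
    pts[0].y = 2.0
    # no free() call: the memory is released when `mem` is collected
</code></pre>
</div>
</div>
</div>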
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [45]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="n">main_rectangles_fast</span><span class="p">()</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt"></div>
<div class="output_subarea output_stream output_stdout output_text">
<pre>4033909
</pre>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>In this simple case, the Cython version is about 20 times faster.</p>
<p>The speed-up ratio depends a lot on the specifics of the Python program (a sketch of how these timings are obtained follows below).</p>
<p>While Cython's speed is rather predictable once your code only makes use of C-level objects (it is usually close to the fastest possible speed), the speed of Python can vary a lot depending on how your program is written and how much overhead the interpreter adds.</p>
</div>
</div>
</div>
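<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>The 20x figure comes from timing both versions with IPython's <code>%timeit</code> magic. A sketch, assuming the pure-Python <code>main_rectangles</code> from earlier in the notebook is still defined:</p>
<pre><code>%timeit -n 1 -r 3 main_rectangles()       # pure Python version
%timeit -n 1 -r 3 main_rectangles_fast()  # Cython version, roughly 20x faster
</code></pre>
</div>
</div>
</div>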
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>How can you be sure your Cython program only makes use of C-level structures?</p>
<p>Use the <code>-a</code> or <code>--annotate</code> flag of the <code>%%cython</code> magic command to display a code analysis in which the lines accessing and using Python objects are highlighted in yellow.</p>
<p>Here is how the code analysis of the previous program looks:</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [53]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="o">%%</span><span class="n">cython</span> <span class="o">-</span><span class="n">a</span>
<span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span>
<span class="k">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span>
<span class="k">cdef</span> <span class="k">struct</span> <span class="nf">Rectangle</span><span class="p">:</span>
<span class="nb">float</span> <span class="n">w</span>
<span class="nb">float</span> <span class="n">h</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">check_rectangles_cy</span><span class="p">(</span><span class="n">Rectangle</span><span class="o">*</span> <span class="n">rectangles</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="nb">float</span> <span class="n">threshold</span><span class="p">):</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_out</span> <span class="o">=</span> <span class="mf">0</span>
<span class="c"># C arrays contain no size information => we need to state it explicitly</span>
<span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">[:</span><span class="n">n_rectangles</span><span class="p">]:</span>
<span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">h</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">cpdef</span> <span class="nf">main_rectangles_fast</span><span class="p">():</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_rectangles</span> <span class="o">=</span> <span class="mf">10000000</span>
<span class="k">cdef</span> <span class="kt">float</span> <span class="nf">threshold</span> <span class="o">=</span> <span class="mf">0.25</span>
<span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span>
<span class="k">cdef</span> <span class="kt">Rectangle</span>* <span class="nf">rectangles</span> <span class="o">=</span> <span class="o"><</span><span class="n">Rectangle</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">))</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">):</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_cy</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">)</span>
<span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt output_prompt">Out[53]:</div>
<div class="output_html rendered_html output_subarea output_execute_result">
<!DOCTYPE html>
<!-- Generated by Cython 0.28.3 -->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Cython: _cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce.pyx</title>
<style type="text/css">
/* Cython annotate stylesheet (score-based yellow shading and Pygments token colors) elided for brevity */
</style>
</head>
<body class="cython">
<p><span style="border-bottom: solid 1px grey;">Generated by Cython 0.28.3</span></p>
<p>
<span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
</p>
<div class="cython"><pre class="cython line score-0"> <span class="">01</span>: <span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span></pre>
<pre class="cython line score-19" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">02</span>: <span class="k">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span></pre>
<pre class='cython code score-19 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_random);
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_random);
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_random);
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_random, __pyx_t_1, 0);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_2, __pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
if (<span class='py_c_api'>PyDict_SetItem</span>(__pyx_d, __pyx_n_s_random, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
</pre><pre class="cython line score-0"> <span class="">03</span>: </pre>
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">04</span>: <span class="k">cdef</span> <span class="k">struct</span> <span class="nf">Rectangle</span><span class="p">:</span></pre>
<pre class='cython code score-0 '>struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle {
float w;
float h;
};
</pre><pre class="cython line score-0"> <span class="">05</span>: <span class="nb">float</span> <span class="n">w</span></pre>
<pre class="cython line score-0"> <span class="">06</span>: <span class="nb">float</span> <span class="n">h</span></pre>
<pre class="cython line score-0"> <span class="">07</span>: </pre>
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">08</span>: <span class="k">cdef</span> <span class="kt">int</span> <span class="nf">check_rectangles_cy</span><span class="p">(</span><span class="n">Rectangle</span><span class="o">*</span> <span class="n">rectangles</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="nb">float</span> <span class="n">threshold</span><span class="p">):</span></pre>
<pre class='cython code score-0 '>static int __pyx_f_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_check_rectangles_cy(struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle *__pyx_v_rectangles, int __pyx_v_n_rectangles, float __pyx_v_threshold) {
int __pyx_v_n_out;
struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle __pyx_v_rectangle;
int __pyx_r;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("check_rectangles_cy", 0);
/* … */
/* function exit code */
__pyx_L0:;
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">09</span>: <span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_out</span> <span class="o">=</span> <span class="mf">0</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_out = 0;
</pre><pre class="cython line score-0"> <span class="">10</span>: <span class="c"># C arrays contain no size information => we need to state it explicitly</span></pre>
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">11</span>: <span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">[:</span><span class="n">n_rectangles</span><span class="p">]:</span></pre>
<pre class='cython code score-0 '> __pyx_t_2 = (__pyx_v_rectangles + __pyx_v_n_rectangles);
for (__pyx_t_3 = __pyx_v_rectangles; __pyx_t_3 < __pyx_t_2; __pyx_t_3++) {
__pyx_t_1 = __pyx_t_3;
__pyx_v_rectangle = (__pyx_t_1[0]);
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">12</span>: <span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">h</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span></pre>
<pre class='cython code score-0 '> __pyx_t_4 = (((__pyx_v_rectangle.w * __pyx_v_rectangle.h) > __pyx_v_threshold) != 0);
if (__pyx_t_4) {
/* … */
}
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">13</span>: <span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_out = (__pyx_v_n_out + 1);
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">14</span>: <span class="k">return</span> <span class="n">n_out</span></pre>
<pre class='cython code score-0 '> __pyx_r = __pyx_v_n_out;
goto __pyx_L0;
</pre><pre class="cython line score-0"> <span class="">15</span>: </pre>
<pre class="cython line score-11" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">16</span>: <span class="k">cpdef</span> <span class="nf">main_rectangles_fast</span><span class="p">():</span></pre>
<pre class='cython code score-11 '>static PyObject *__pyx_pw_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_1main_rectangles_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_f_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_main_rectangles_fast(CYTHON_UNUSED int __pyx_skip_dispatch) {
int __pyx_v_n_rectangles;
float __pyx_v_threshold;
struct __pyx_obj_5cymem_5cymem_Pool *__pyx_v_mem = 0;
struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle *__pyx_v_rectangles;
int __pyx_v_i;
int __pyx_v_n_out;
PyObject *__pyx_r = NULL;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_fast", 0);
/* … */
/* function exit code */
__pyx_r = Py_None; <span class='pyx_macro_api'>__Pyx_INCREF</span>(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_7);
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce.main_rectangles_fast", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>((PyObject *)__pyx_v_mem);
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
/* Python wrapper */
static PyObject *__pyx_pw_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_1main_rectangles_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_1main_rectangles_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_fast (wrapper)", 0);
__pyx_r = __pyx_pf_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_main_rectangles_fast(__pyx_self);
/* function exit code */
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
static PyObject *__pyx_pf_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_main_rectangles_fast(CYTHON_UNUSED PyObject *__pyx_self) {
PyObject *__pyx_r = NULL;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_fast", 0);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
__pyx_t_1 = __pyx_f_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_main_rectangles_fast(0);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* function exit code */
__pyx_L1_error:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce.main_rectangles_fast", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">17</span>: <span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_rectangles</span> <span class="o">=</span> <span class="mf">10000000</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_rectangles = 0x989680;
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">18</span>: <span class="k">cdef</span> <span class="kt">float</span> <span class="nf">threshold</span> <span class="o">=</span> <span class="mf">0.25</span></pre>
<pre class='cython code score-0 '> __pyx_v_threshold = 0.25;
</pre><pre class="cython line score-2" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">19</span>: <span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span></pre>
<pre class='cython code score-2 '> __pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(((PyObject *)__pyx_ptype_5cymem_5cymem_Pool));<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_v_mem = ((struct __pyx_obj_5cymem_5cymem_Pool *)__pyx_t_1);
__pyx_t_1 = 0;
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">20</span>: <span class="k">cdef</span> <span class="kt">Rectangle</span>* <span class="nf">rectangles</span> <span class="o">=</span> <span class="o"><</span><span class="n">Rectangle</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">))</span></pre>
<pre class='cython code score-0 '> __pyx_t_2 = ((struct __pyx_vtabstruct_5cymem_5cymem_Pool *)__pyx_v_mem->__pyx_vtab)->alloc(__pyx_v_mem, __pyx_v_n_rectangles, (sizeof(struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle)));<span class='error_goto'> if (unlikely(__pyx_t_2 == ((void *)NULL))) __PYX_ERR(0, 20, __pyx_L1_error)</span>
__pyx_v_rectangles = ((struct __pyx_t_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_Rectangle *)__pyx_t_2);
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">21</span>: <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">):</span></pre>
<pre class='cython code score-0 '> __pyx_t_3 = __pyx_v_n_rectangles;
__pyx_t_4 = __pyx_t_3;
for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) {
__pyx_v_i = __pyx_t_5;
</pre><pre class="cython line score-29" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">22</span>: <span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span></pre>
<pre class='cython code score-29 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_GetModuleGlobalName</span>(__pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 22, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_6))) {
__pyx_t_7 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_6);
if (likely(__pyx_t_7)) {
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_7);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_6, function);
}
}
if (__pyx_t_7) {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_6, __pyx_t_7);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_7); __pyx_t_7 = 0;
} else {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error)</span>
}
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_8 = __pyx_<span class='py_c_api'>PyFloat_AsFloat</span>(__pyx_t_1); if (unlikely((__pyx_t_8 == (float)-1) && <span class='py_c_api'>PyErr_Occurred</span>())) <span class='error_goto'>__PYX_ERR(0, 22, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
(__pyx_v_rectangles[__pyx_v_i]).w = __pyx_t_8;
</pre><pre class="cython line score-29" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">23</span>: <span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span></pre>
<pre class='cython code score-29 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_GetModuleGlobalName</span>(__pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 23, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_6))) {
__pyx_t_7 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_6);
if (likely(__pyx_t_7)) {
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_7);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_6, function);
}
}
if (__pyx_t_7) {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_6, __pyx_t_7);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_7); __pyx_t_7 = 0;
} else {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L1_error)</span>
}
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_8 = __pyx_<span class='py_c_api'>PyFloat_AsFloat</span>(__pyx_t_1); if (unlikely((__pyx_t_8 == (float)-1) && <span class='py_c_api'>PyErr_Occurred</span>())) <span class='error_goto'>__PYX_ERR(0, 23, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
(__pyx_v_rectangles[__pyx_v_i]).h = __pyx_t_8;
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">24</span>: <span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_cy</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">)</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_out = __pyx_f_46_cython_magic_8305ca5d7d676d0e8a3d2abadd94b0ce_check_rectangles_cy(__pyx_v_rectangles, __pyx_v_n_rectangles, __pyx_v_threshold);
</pre><pre class="cython line score-6" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">25</span>: <span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span></pre>
<pre class='cython code score-6 '> __pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyInt_From_int</span>(__pyx_v_n_out);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_t_6 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_builtin_print, __pyx_t_1);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
</pre></div></body></html>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>The important element here is that lines 11 to 13 are not highlighted, which means they run at the fastest possible speed.</p>
<p>It's OK to have yellow lines in the <code>main_rectangles_fast</code> function, as this function is only called once when we execute our program anyway. The yellow lines 22 and 23 are initialization lines that we could avoid by using a C-level random function such as the C standard library's <code>rand()</code>; we didn't want to clutter this example, but a hedged sketch of that variant is shown in the next cell.</p>
<p>Following that sketch, here is the previous Cython program deliberately de-optimized (with Python objects in the loop):</p>
</div>
</div>
</div>
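<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>Here is the promised sketch of the all-C initialization, drawing random numbers with the C standard library instead of Python's <code>random</code> module (a hedged variant, not run in this notebook; <code>rand() / RAND_MAX</code> yields a float in [0, 1]):</p>
<pre><code>%%cython
from cymem.cymem cimport Pool
from libc.stdlib cimport rand, RAND_MAX

cdef struct Rectangle:
    float w
    float h

cpdef main_rectangles_all_c():
    cdef int n_rectangles = 10000000
    cdef Pool mem = Pool()
    cdef Rectangle* rectangles = <Rectangle*>mem.alloc(n_rectangles, sizeof(Rectangle))
    cdef int i
    for i in range(n_rectangles):
        # rand() is a plain C call: no Python objects left in this loop
        rectangles[i].w = rand() / <float>RAND_MAX
        rectangles[i].h = rand() / <float>RAND_MAX
    # the counting loop is unchanged from check_rectangles_cy above
</code></pre>
</div>
</div>
</div>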
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [54]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="o">%%</span><span class="n">cython</span> <span class="o">-</span><span class="n">a</span>
<span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span>
<span class="k">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span>
<span class="k">cdef</span> <span class="k">struct</span> <span class="nf">Rectangle</span><span class="p">:</span>
<span class="nb">float</span> <span class="n">w</span>
<span class="nb">float</span> <span class="n">h</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">check_rectangles_cy</span><span class="p">(</span><span class="n">Rectangle</span><span class="o">*</span> <span class="n">rectangles</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="nb">float</span> <span class="n">threshold</span><span class="p">):</span>
<span class="c"># ========== MODIFICATION ===========</span>
<span class="c"># We changed the following line from `cdef int n_out = 0` to</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="mf">0</span>
<span class="c"># n_out is not defined as an `int` anymore and is now thus a regular Python object</span>
<span class="c"># ===================================</span>
<span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">[:</span><span class="n">n_rectangles</span><span class="p">]:</span>
<span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">h</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">cpdef</span> <span class="nf">main_rectangles_not_so_fast</span><span class="p">():</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_rectangles</span> <span class="o">=</span> <span class="mf">10000000</span>
<span class="k">cdef</span> <span class="kt">float</span> <span class="nf">threshold</span> <span class="o">=</span> <span class="mf">0.25</span>
<span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span>
<span class="k">cdef</span> <span class="kt">Rectangle</span>* <span class="nf">rectangles</span> <span class="o">=</span> <span class="o"><</span><span class="n">Rectangle</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">))</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">):</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_cy</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">)</span>
<span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt output_prompt">Out[54]:</div>
<div class="output_html rendered_html output_subarea output_execute_result">
<!DOCTYPE html>
<!-- Generated by Cython 0.28.3 -->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Cython: _cython_magic_dbc2c06a712520185e24b7d477e83d8b.pyx</title>
<style type="text/css">
/* identical Cython annotate stylesheet elided (same as in the previous output) */
.cython.score-218 {background-color: #FFFF0b;}
.cython.score-219 {background-color: #FFFF0b;}
.cython.score-220 {background-color: #FFFF0b;}
.cython.score-221 {background-color: #FFFF0b;}
.cython.score-222 {background-color: #FFFF0a;}
.cython.score-223 {background-color: #FFFF0a;}
.cython.score-224 {background-color: #FFFF0a;}
.cython.score-225 {background-color: #FFFF0a;}
.cython.score-226 {background-color: #FFFF0a;}
.cython.score-227 {background-color: #FFFF0a;}
.cython.score-228 {background-color: #FFFF0a;}
.cython.score-229 {background-color: #FFFF0a;}
.cython.score-230 {background-color: #FFFF0a;}
.cython.score-231 {background-color: #FFFF0a;}
.cython.score-232 {background-color: #FFFF0a;}
.cython.score-233 {background-color: #FFFF0a;}
.cython.score-234 {background-color: #FFFF0a;}
.cython.score-235 {background-color: #FFFF0a;}
.cython.score-236 {background-color: #FFFF0a;}
.cython.score-237 {background-color: #FFFF0a;}
.cython.score-238 {background-color: #FFFF0a;}
.cython.score-239 {background-color: #FFFF0a;}
.cython.score-240 {background-color: #FFFF0a;}
.cython.score-241 {background-color: #FFFF0a;}
.cython.score-242 {background-color: #FFFF0a;}
.cython.score-243 {background-color: #FFFF0a;}
.cython.score-244 {background-color: #FFFF0a;}
.cython.score-245 {background-color: #FFFF0a;}
.cython.score-246 {background-color: #FFFF09;}
.cython.score-247 {background-color: #FFFF09;}
.cython.score-248 {background-color: #FFFF09;}
.cython.score-249 {background-color: #FFFF09;}
.cython.score-250 {background-color: #FFFF09;}
.cython.score-251 {background-color: #FFFF09;}
.cython.score-252 {background-color: #FFFF09;}
.cython.score-253 {background-color: #FFFF09;}
.cython.score-254 {background-color: #FFFF09;}
.cython .hll { background-color: #ffffcc }
.cython { background: #f8f8f8; }
.cython .c { color: #408080; font-style: italic } /* Comment */
.cython .err { border: 1px solid #FF0000 } /* Error */
.cython .k { color: #008000; font-weight: bold } /* Keyword */
.cython .o { color: #666666 } /* Operator */
.cython .ch { color: #408080; font-style: italic } /* Comment.Hashbang */
.cython .cm { color: #408080; font-style: italic } /* Comment.Multiline */
.cython .cp { color: #BC7A00 } /* Comment.Preproc */
.cython .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */
.cython .c1 { color: #408080; font-style: italic } /* Comment.Single */
.cython .cs { color: #408080; font-style: italic } /* Comment.Special */
.cython .gd { color: #A00000 } /* Generic.Deleted */
.cython .ge { font-style: italic } /* Generic.Emph */
.cython .gr { color: #FF0000 } /* Generic.Error */
.cython .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.cython .gi { color: #00A000 } /* Generic.Inserted */
.cython .go { color: #888888 } /* Generic.Output */
.cython .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
.cython .gs { font-weight: bold } /* Generic.Strong */
.cython .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.cython .gt { color: #0044DD } /* Generic.Traceback */
.cython .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
.cython .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
.cython .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
.cython .kp { color: #008000 } /* Keyword.Pseudo */
.cython .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
.cython .kt { color: #B00040 } /* Keyword.Type */
.cython .m { color: #666666 } /* Literal.Number */
.cython .s { color: #BA2121 } /* Literal.String */
.cython .na { color: #7D9029 } /* Name.Attribute */
.cython .nb { color: #008000 } /* Name.Builtin */
.cython .nc { color: #0000FF; font-weight: bold } /* Name.Class */
.cython .no { color: #880000 } /* Name.Constant */
.cython .nd { color: #AA22FF } /* Name.Decorator */
.cython .ni { color: #999999; font-weight: bold } /* Name.Entity */
.cython .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
.cython .nf { color: #0000FF } /* Name.Function */
.cython .nl { color: #A0A000 } /* Name.Label */
.cython .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
.cython .nt { color: #008000; font-weight: bold } /* Name.Tag */
.cython .nv { color: #19177C } /* Name.Variable */
.cython .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
.cython .w { color: #bbbbbb } /* Text.Whitespace */
.cython .mb { color: #666666 } /* Literal.Number.Bin */
.cython .mf { color: #666666 } /* Literal.Number.Float */
.cython .mh { color: #666666 } /* Literal.Number.Hex */
.cython .mi { color: #666666 } /* Literal.Number.Integer */
.cython .mo { color: #666666 } /* Literal.Number.Oct */
.cython .sa { color: #BA2121 } /* Literal.String.Affix */
.cython .sb { color: #BA2121 } /* Literal.String.Backtick */
.cython .sc { color: #BA2121 } /* Literal.String.Char */
.cython .dl { color: #BA2121 } /* Literal.String.Delimiter */
.cython .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
.cython .s2 { color: #BA2121 } /* Literal.String.Double */
.cython .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
.cython .sh { color: #BA2121 } /* Literal.String.Heredoc */
.cython .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
.cython .sx { color: #008000 } /* Literal.String.Other */
.cython .sr { color: #BB6688 } /* Literal.String.Regex */
.cython .s1 { color: #BA2121 } /* Literal.String.Single */
.cython .ss { color: #19177C } /* Literal.String.Symbol */
.cython .bp { color: #008000 } /* Name.Builtin.Pseudo */
.cython .fm { color: #0000FF } /* Name.Function.Magic */
.cython .vc { color: #19177C } /* Name.Variable.Class */
.cython .vg { color: #19177C } /* Name.Variable.Global */
.cython .vi { color: #19177C } /* Name.Variable.Instance */
.cython .vm { color: #19177C } /* Name.Variable.Magic */
.cython .il { color: #666666 } /* Literal.Number.Integer.Long */
</style>
</head>
<body class="cython">
<p><span style="border-bottom: solid 1px grey;">Generated by Cython 0.28.3</span></p>
<p>
<span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
</p>
<div class="cython"><pre class="cython line score-0"> <span class="">01</span>: <span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span></pre>
<pre class="cython line score-19" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">02</span>: <span class="k">from</span> <span class="nn">random</span> <span class="k">import</span> <span class="n">random</span></pre>
<pre class='cython code score-19 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_random);
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_random);
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_random);
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_random, __pyx_t_1, 0);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_2, __pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
if (<span class='py_c_api'>PyDict_SetItem</span>(__pyx_d, __pyx_n_s_random, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 2, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
</pre><pre class="cython line score-0"> <span class="">03</span>: </pre>
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">04</span>: <span class="k">cdef</span> <span class="k">struct</span> <span class="nf">Rectangle</span><span class="p">:</span></pre>
<pre class='cython code score-0 '>struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle {
float w;
float h;
};
</pre><pre class="cython line score-0"> <span class="">05</span>: <span class="nb">float</span> <span class="n">w</span></pre>
<pre class="cython line score-0"> <span class="">06</span>: <span class="nb">float</span> <span class="n">h</span></pre>
<pre class="cython line score-0"> <span class="">07</span>: </pre>
<pre class="cython line score-4" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">08</span>: <span class="k">cdef</span> <span class="kt">int</span> <span class="nf">check_rectangles_cy</span><span class="p">(</span><span class="n">Rectangle</span><span class="o">*</span> <span class="n">rectangles</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="nb">float</span> <span class="n">threshold</span><span class="p">):</span></pre>
<pre class='cython code score-4 '>static int __pyx_f_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_check_rectangles_cy(struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle *__pyx_v_rectangles, int __pyx_v_n_rectangles, float __pyx_v_threshold) {
PyObject *__pyx_v_n_out = NULL;
struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle __pyx_v_rectangle;
int __pyx_r;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("check_rectangles_cy", 0);
/* … */
/* function exit code */
__pyx_L1_error:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5);
<span class='pyx_c_api'>__Pyx_WriteUnraisable</span>("_cython_magic_dbc2c06a712520185e24b7d477e83d8b.check_rectangles_cy", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0);
__pyx_r = 0;
__pyx_L0:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_v_n_out);
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
</pre><pre class="cython line score-0"> <span class="">09</span>: <span class="c"># ========== MODIFICATION ===========</span></pre>
<pre class="cython line score-0"> <span class="">10</span>: <span class="c"># We changed the following line from `cdef int n_out = 0` to</span></pre>
<pre class="cython line score-1" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">11</span>: <span class="n">n_out</span> <span class="o">=</span> <span class="mf">0</span></pre>
<pre class='cython code score-1 '> <span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_int_0);
__pyx_v_n_out = __pyx_int_0;
</pre><pre class="cython line score-0"> <span class="">12</span>: <span class="c"># n_out is not defined as an `int` anymore and is now thus a regular Python object</span></pre>
<pre class="cython line score-0"> <span class="">13</span>: <span class="c"># ===================================</span></pre>
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">14</span>: <span class="k">for</span> <span class="n">rectangle</span> <span class="ow">in</span> <span class="n">rectangles</span><span class="p">[:</span><span class="n">n_rectangles</span><span class="p">]:</span></pre>
<pre class='cython code score-0 '> __pyx_t_2 = (__pyx_v_rectangles + __pyx_v_n_rectangles);
for (__pyx_t_3 = __pyx_v_rectangles; __pyx_t_3 < __pyx_t_2; __pyx_t_3++) {
__pyx_t_1 = __pyx_t_3;
__pyx_v_rectangle = (__pyx_t_1[0]);
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">15</span>: <span class="k">if</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">w</span> <span class="o">*</span> <span class="n">rectangle</span><span class="o">.</span><span class="n">h</span> <span class="o">></span> <span class="n">threshold</span><span class="p">:</span></pre>
<pre class='cython code score-0 '> __pyx_t_4 = (((__pyx_v_rectangle.w * __pyx_v_rectangle.h) > __pyx_v_threshold) != 0);
if (__pyx_t_4) {
/* … */
}
}
</pre><pre class="cython line score-3" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">16</span>: <span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span></pre>
<pre class='cython code score-3 '> __pyx_t_5 = <span class='pyx_c_api'>__Pyx_PyInt_AddObjC</span>(__pyx_v_n_out, __pyx_int_1, 1, 1);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 16, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_v_n_out, __pyx_t_5);
__pyx_t_5 = 0;
</pre><pre class="cython line score-7" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">17</span>: <span class="k">return</span> <span class="n">n_out</span></pre>
<pre class='cython code score-7 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_PyInt_As_int</span>(__pyx_v_n_out); if (unlikely((__pyx_t_6 == (int)-1) && <span class='py_c_api'>PyErr_Occurred</span>())) <span class='error_goto'>__PYX_ERR(0, 17, __pyx_L1_error)</span>
__pyx_r = __pyx_t_6;
goto __pyx_L0;
</pre><pre class="cython line score-0"> <span class="">18</span>: </pre>
<pre class="cython line score-11" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">19</span>: <span class="k">cpdef</span> <span class="nf">main_rectangles_not_so_fast</span><span class="p">():</span></pre>
<pre class='cython code score-11 '>static PyObject *__pyx_pw_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_1main_rectangles_not_so_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_f_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_main_rectangles_not_so_fast(CYTHON_UNUSED int __pyx_skip_dispatch) {
int __pyx_v_n_rectangles;
float __pyx_v_threshold;
struct __pyx_obj_5cymem_5cymem_Pool *__pyx_v_mem = 0;
struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle *__pyx_v_rectangles;
int __pyx_v_i;
int __pyx_v_n_out;
PyObject *__pyx_r = NULL;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_not_so_fast", 0);
/* … */
/* function exit code */
__pyx_r = Py_None; <span class='pyx_macro_api'>__Pyx_INCREF</span>(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_7);
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("_cython_magic_dbc2c06a712520185e24b7d477e83d8b.main_rectangles_not_so_fast", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>((PyObject *)__pyx_v_mem);
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
/* Python wrapper */
static PyObject *__pyx_pw_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_1main_rectangles_not_so_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_1main_rectangles_not_so_fast(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_not_so_fast (wrapper)", 0);
__pyx_r = __pyx_pf_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_main_rectangles_not_so_fast(__pyx_self);
/* function exit code */
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
static PyObject *__pyx_pf_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_main_rectangles_not_so_fast(CYTHON_UNUSED PyObject *__pyx_self) {
PyObject *__pyx_r = NULL;
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("main_rectangles_not_so_fast", 0);
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
__pyx_t_1 = __pyx_f_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_main_rectangles_not_so_fast(0);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* function exit code */
__pyx_L1_error:;
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("_cython_magic_dbc2c06a712520185e24b7d477e83d8b.main_rectangles_not_so_fast", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
return __pyx_r;
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">20</span>: <span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_rectangles</span> <span class="o">=</span> <span class="mf">10000000</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_rectangles = 0x989680;
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">21</span>: <span class="k">cdef</span> <span class="kt">float</span> <span class="nf">threshold</span> <span class="o">=</span> <span class="mf">0.25</span></pre>
<pre class='cython code score-0 '> __pyx_v_threshold = 0.25;
</pre><pre class="cython line score-2" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">22</span>: <span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span></pre>
<pre class='cython code score-2 '> __pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(((PyObject *)__pyx_ptype_5cymem_5cymem_Pool));<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_v_mem = ((struct __pyx_obj_5cymem_5cymem_Pool *)__pyx_t_1);
__pyx_t_1 = 0;
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">23</span>: <span class="k">cdef</span> <span class="kt">Rectangle</span>* <span class="nf">rectangles</span> <span class="o">=</span> <span class="o"><</span><span class="n">Rectangle</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">Rectangle</span><span class="p">))</span></pre>
<pre class='cython code score-0 '> __pyx_t_2 = ((struct __pyx_vtabstruct_5cymem_5cymem_Pool *)__pyx_v_mem->__pyx_vtab)->alloc(__pyx_v_mem, __pyx_v_n_rectangles, (sizeof(struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle)));<span class='error_goto'> if (unlikely(__pyx_t_2 == ((void *)NULL))) __PYX_ERR(0, 23, __pyx_L1_error)</span>
__pyx_v_rectangles = ((struct __pyx_t_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_Rectangle *)__pyx_t_2);
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">24</span>: <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">n_rectangles</span><span class="p">):</span></pre>
<pre class='cython code score-0 '> __pyx_t_3 = __pyx_v_n_rectangles;
__pyx_t_4 = __pyx_t_3;
for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) {
__pyx_v_i = __pyx_t_5;
</pre><pre class="cython line score-29" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">25</span>: <span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">w</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span></pre>
<pre class='cython code score-29 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_GetModuleGlobalName</span>(__pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_6))) {
__pyx_t_7 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_6);
if (likely(__pyx_t_7)) {
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_7);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_6, function);
}
}
if (__pyx_t_7) {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_6, __pyx_t_7);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_7); __pyx_t_7 = 0;
} else {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
}
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_8 = __pyx_<span class='py_c_api'>PyFloat_AsFloat</span>(__pyx_t_1); if (unlikely((__pyx_t_8 == (float)-1) && <span class='py_c_api'>PyErr_Occurred</span>())) <span class='error_goto'>__PYX_ERR(0, 25, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
(__pyx_v_rectangles[__pyx_v_i]).w = __pyx_t_8;
</pre><pre class="cython line score-29" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">26</span>: <span class="n">rectangles</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">h</span> <span class="o">=</span> <span class="n">random</span><span class="p">()</span></pre>
<pre class='cython code score-29 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_GetModuleGlobalName</span>(__pyx_n_s_random);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 26, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_6))) {
__pyx_t_7 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_6);
if (likely(__pyx_t_7)) {
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_7);
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_6, function);
}
}
if (__pyx_t_7) {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_6, __pyx_t_7);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 26, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_7); __pyx_t_7 = 0;
} else {
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 26, __pyx_L1_error)</span>
}
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_8 = __pyx_<span class='py_c_api'>PyFloat_AsFloat</span>(__pyx_t_1); if (unlikely((__pyx_t_8 == (float)-1) && <span class='py_c_api'>PyErr_Occurred</span>())) <span class='error_goto'>__PYX_ERR(0, 26, __pyx_L1_error)</span>
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
(__pyx_v_rectangles[__pyx_v_i]).h = __pyx_t_8;
}
</pre><pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">27</span>: <span class="n">n_out</span> <span class="o">=</span> <span class="n">check_rectangles_cy</span><span class="p">(</span><span class="n">rectangles</span><span class="p">,</span> <span class="n">n_rectangles</span><span class="p">,</span> <span class="n">threshold</span><span class="p">)</span></pre>
<pre class='cython code score-0 '> __pyx_v_n_out = __pyx_f_46_cython_magic_dbc2c06a712520185e24b7d477e83d8b_check_rectangles_cy(__pyx_v_rectangles, __pyx_v_n_rectangles, __pyx_v_threshold);
</pre><pre class="cython line score-6" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">28</span>: <span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span></pre>
<pre class='cython code score-6 '> __pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyInt_From_int</span>(__pyx_v_n_out);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
__pyx_t_6 = <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_builtin_print, __pyx_t_1);<span class='error_goto'> if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 28, __pyx_L1_error)</span>
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_6);
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_6); __pyx_t_6 = 0;
</pre></div></body></html>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>We can see that line 16 in the loop of <code>check_rectangles_cy</code> is highlighted in yellow, indicating that the Cython compiler had to fall back to Python C-API calls there, adding overhead on every iteration of the loop.</p>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<h1 id="💫-Using-Cython-with-spaCy-to-speed-up-NLP">💫 Using Cython with spaCy to speed up NLP<a class="anchor-link" href="#💫-Using-Cython-with-spaCy-to-speed-up-NLP">&#182;</a></h1>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>Our blog post goes into some detail about how spaCy can help you speed up your code by using Cython for NLP.</p>
<p>Here is a short summary of the post:</p>
<ul>
<li>the official Cython documentation advises against the use of C strings: <code>Generally speaking: unless you know what you are doing, avoid using C strings where possible and use Python string objects instead.</code></li>
<li>spaCy lets us overcome this problem by:<ul>
<li>converting all strings to 64-bit hashes, using a lookup table between Python unicode strings and 64-bit hashes called the <code>StringStore</code></li>
<li>giving us access to fully populated C level structures of the document and vocabulary called <code>TokenC</code> and <code>LexemeC</code></li>
</ul>
</li>
</ul>
<p>The <code>StringStore</code> object is accessible from everywhere in spaCy and from every object (see the illustration below), for example as <code>nlp.vocab.strings</code>, <code>doc.vocab.strings</code> or <code>span.doc.vocab.strings</code>:</p>
</div>
</div>
</div>
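<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>As a quick sketch of how this lookup behaves (illustrative only, using the spaCy 2.x API of this post), the <code>StringStore</code> works in both directions: adding a string returns its 64-bit hash, and indexing with a hash returns the string back:</p>
<pre><code>import spacy

nlp = spacy.load('en')
run_hash = nlp.vocab.strings.add('run')  # returns the 64-bit hash of 'run'
print(nlp.vocab.strings[run_hash])       # looking the hash up gives back 'run'
</code></pre>
</div>
</div>
</div>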
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p><img src="https://cdn-images-1.medium.com/max/600/1*nxvhI7mEc9A75PwMH-PSBg.png" alt="spaCy's internals" title="spaCy's internals"></p>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>Here is a simple example of NLP processing in Cython.</p>
<p>First let's build a list of big documents and parse them using spaCy (this takes a few minutes):</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [56]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="kn">import</span> <span class="nn">urllib.request</span>
<span class="kn">import</span> <span class="nn">spacy</span>
<span class="c1"># Build a dataset of 10 parsed document extracted from the Wikitext-2 dataset</span>
<span class="k">with</span> <span class="n">urllib</span><span class="o">.</span><span class="n">request</span><span class="o">.</span><span class="n">urlopen</span><span class="p">(</span><span class="s1">'https://raw.githubusercontent.com/pytorch/examples/master/word_language_model/data/wikitext-2/valid.txt'</span><span class="p">)</span> <span class="k">as</span> <span class="n">response</span><span class="p">:</span>
<span class="n">text</span> <span class="o">=</span> <span class="n">response</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
<span class="n">nlp</span> <span class="o">=</span> <span class="n">spacy</span><span class="o">.</span><span class="n">load</span><span class="p">(</span><span class="s1">'en'</span><span class="p">)</span>
<span class="n">doc_list</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">nlp</span><span class="p">(</span><span class="n">text</span><span class="p">[:</span><span class="mi">800000</span><span class="p">]</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'utf8'</span><span class="p">))</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="mi">10</span><span class="p">))</span>
</pre></div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>We have about 1.7 million tokens ("words") in our dataset:</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [57]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="nb">sum</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">doc</span><span class="p">)</span> <span class="k">for</span> <span class="n">doc</span> <span class="ow">in</span> <span class="n">doc_list</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt output_prompt">Out[57]:</div>
<div class="output_text output_subarea output_execute_result">
<pre>1716200</pre>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>We want to perform some NLP task on this dataset.</p>
<p>For example, we would like to count the number of times the word "run" is used as a noun in the dataset (i.e. tagged with a "NN" Part-Of-Speech tag).</p>
<p>A Python loop to do that is short and straightforward:</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [58]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="k">def</span> <span class="nf">slow_loop</span><span class="p">(</span><span class="n">doc_list</span><span class="p">,</span> <span class="n">word</span><span class="p">,</span> <span class="n">tag</span><span class="p">):</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">doc</span> <span class="ow">in</span> <span class="n">doc_list</span><span class="p">:</span>
<span class="k">for</span> <span class="n">tok</span> <span class="ow">in</span> <span class="n">doc</span><span class="p">:</span>
<span class="k">if</span> <span class="n">tok</span><span class="o">.</span><span class="n">lower_</span> <span class="o">==</span> <span class="n">word</span> <span class="ow">and</span> <span class="n">tok</span><span class="o">.</span><span class="n">tag_</span> <span class="o">==</span> <span class="n">tag</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">def</span> <span class="nf">main_nlp_slow</span><span class="p">(</span><span class="n">doc_list</span><span class="p">):</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">slow_loop</span><span class="p">(</span><span class="n">doc_list</span><span class="p">,</span> <span class="s1">'run'</span><span class="p">,</span> <span class="s1">'NN'</span><span class="p">)</span>
<span class="nb">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [59]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="c1"># But it's also quite slow</span>
<span class="n">main_nlp_slow</span><span class="p">(</span><span class="n">doc_list</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt"></div>
<div class="output_subarea output_stream output_stdout output_text">
<pre>90
</pre>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>On my laptop this code takes about 1.4 seconds to get the answer.</p>
<p>Let's try to speed this up with spaCy and a bit of Cython.</p>
<p>First, we have to think about the data structure. We will need a C level array for the dataset, with pointers to each document's TokenC array. We'll also need to convert the strings we use for testing to 64-bit hashes: "run" and "NN". When all the data required for our processing is in C level objects, we can then iterate at full C speed over the dataset.</p>
<p>Here is how this example can be written in Cython with spaCy:</p>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [71]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="o">%%</span><span class="n">cython</span> <span class="o">-+</span>
<span class="k">import</span> <span class="nn">numpy</span> <span class="c"># Sometime we have a fail to import numpy compilation error if we don't import numpy</span>
<span class="k">from</span> <span class="nn">cymem.cymem</span> <span class="k">cimport</span> <span class="n">Pool</span>
<span class="k">from</span> <span class="nn">spacy.tokens.doc</span> <span class="k">cimport</span> <span class="n">Doc</span>
<span class="k">from</span> <span class="nn">spacy.typedefs</span> <span class="k">cimport</span> <span class="n">hash_t</span>
<span class="k">from</span> <span class="nn">spacy.structs</span> <span class="k">cimport</span> <span class="n">TokenC</span>
<span class="k">cdef</span> <span class="k">struct</span> <span class="nf">DocElement</span><span class="p">:</span>
<span class="n">TokenC</span><span class="o">*</span> <span class="n">c</span>
<span class="nb">int</span> <span class="n">length</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">fast_loop</span><span class="p">(</span><span class="n">DocElement</span><span class="o">*</span> <span class="n">docs</span><span class="p">,</span> <span class="nb">int</span> <span class="n">n_docs</span><span class="p">,</span> <span class="n">hash_t</span> <span class="n">word</span><span class="p">,</span> <span class="n">hash_t</span> <span class="n">tag</span><span class="p">):</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">n_out</span> <span class="o">=</span> <span class="mf">0</span>
<span class="k">for</span> <span class="n">doc</span> <span class="ow">in</span> <span class="n">docs</span><span class="p">[:</span><span class="n">n_docs</span><span class="p">]:</span>
<span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">doc</span><span class="o">.</span><span class="n">c</span><span class="p">[:</span><span class="n">doc</span><span class="o">.</span><span class="n">length</span><span class="p">]:</span>
<span class="k">if</span> <span class="n">c</span><span class="o">.</span><span class="n">lex</span><span class="o">.</span><span class="n">lower</span> <span class="o">==</span> <span class="n">word</span> <span class="ow">and</span> <span class="n">c</span><span class="o">.</span><span class="n">tag</span> <span class="o">==</span> <span class="n">tag</span><span class="p">:</span>
<span class="n">n_out</span> <span class="o">+=</span> <span class="mf">1</span>
<span class="k">return</span> <span class="n">n_out</span>
<span class="k">cpdef</span> <span class="nf">main_nlp_fast</span><span class="p">(</span><span class="n">doc_list</span><span class="p">):</span>
<span class="k">cdef</span> <span class="kt">int</span> <span class="nf">i</span><span class="p">,</span> <span class="nf">n_out</span><span class="p">,</span> <span class="nf">n_docs</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">doc_list</span><span class="p">)</span>
<span class="k">cdef</span> <span class="kt">Pool</span> <span class="nf">mem</span> <span class="o">=</span> <span class="n">Pool</span><span class="p">()</span>
<span class="k">cdef</span> <span class="kt">DocElement</span>* <span class="nf">docs</span> <span class="o">=</span> <span class="o"><</span><span class="n">DocElement</span><span class="o">*></span><span class="n">mem</span><span class="o">.</span><span class="n">alloc</span><span class="p">(</span><span class="n">n_docs</span><span class="p">,</span> <span class="n">sizeof</span><span class="p">(</span><span class="n">DocElement</span><span class="p">))</span>
<span class="k">cdef</span> <span class="kt">Doc</span> <span class="nf">doc</span>
<span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">doc</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">doc_list</span><span class="p">):</span> <span class="c"># Populate our database structure</span>
<span class="n">docs</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">c</span> <span class="o">=</span> <span class="n">doc</span><span class="o">.</span><span class="n">c</span>
<span class="n">docs</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">length</span> <span class="o">=</span> <span class="p">(<</span><span class="kt">Doc</span><span class="p">></span><span class="n">doc</span><span class="p">)</span><span class="o">.</span><span class="n">length</span>
<span class="n">word_hash</span> <span class="o">=</span> <span class="n">doc</span><span class="o">.</span><span class="n">vocab</span><span class="o">.</span><span class="n">strings</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="s">'run'</span><span class="p">)</span>
<span class="n">tag_hash</span> <span class="o">=</span> <span class="n">doc</span><span class="o">.</span><span class="n">vocab</span><span class="o">.</span><span class="n">strings</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="s">'NN'</span><span class="p">)</span>
<span class="n">n_out</span> <span class="o">=</span> <span class="n">fast_loop</span><span class="p">(</span><span class="n">docs</span><span class="p">,</span> <span class="n">n_docs</span><span class="p">,</span> <span class="n">word_hash</span><span class="p">,</span> <span class="n">tag_hash</span><span class="p">)</span>
<span class="k">print</span><span class="p">(</span><span class="n">n_out</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing code_cell rendered">
<div class="input">
<div class="prompt input_prompt">In [72]:</div>
<div class="inner_cell">
<div class="input_area">
<div class=" highlight hl-ipython3"><pre><span></span><span class="n">main_nlp_fast</span><span class="p">(</span><span class="n">doc_list</span><span class="p">)</span>
</pre></div>
</div>
</div>
</div>
<div class="output_wrapper">
<div class="output">
<div class="output_area">
<div class="prompt"></div>
<div class="output_subarea output_stream output_stdout output_text">
<pre>90
</pre>
</div>
</div>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>The code is a bit longer because we have to declare and populate the C structures in <code>main_nlp_fast</code> before calling our Cython function.</p>
<p>But it is also a lot faster! In my Jupyter notebook, this Cython code takes about 30 milliseconds to run on my laptop, which is about <strong>50 times faster</strong> than our previous pure Python loop.</p>
</div>
</div>
</div>
<div class="cell border-box-sizing text_cell rendered"><div class="prompt input_prompt">
</div>
<div class="inner_cell">
<div class="text_cell_render border-box-sizing rendered_html">
<p>The absolute speed is also impressive for a module written in an interactive Jupyter notebook that can interface natively with other Python modules and functions: scanning ~1.7 million words in 30 ms means we are processing <strong>a whopping 56 million words per second</strong>.</p>
</div>
</div>
</div>
</div>
</div>
</body>
</html>
| 8 |
0 | hf_public_repos | hf_public_repos/100-times-faster-nlp/100-times-faster-nlp-in-python.ipynb | %load_ext Cython
from random import random
class Rectangle:
def __init__(self, w, h):
self.w = w
self.h = h
def area(self):
return self.w * self.h
def check_rectangles_py(rectangles, threshold):
n_out = 0
for rectangle in rectangles:
if rectangle.area() > threshold:
n_out += 1
return n_out
def main_rectangles_slow():
n_rectangles = 10000000
rectangles = list(Rectangle(random(), random()) for i in range(n_rectangles))
n_out = check_rectangles_py(rectangles, threshold=0.25)
    print(n_out)
%%time
# Let's run it:
main_rectangles_slow()
%%cython
from cymem.cymem cimport Pool
from random import random
cdef struct Rectangle:
float w
float h
cdef int check_rectangles_cy(Rectangle* rectangles, int n_rectangles, float threshold):
cdef int n_out = 0
# C arrays contain no size information => we need to state it explicitly
for rectangle in rectangles[:n_rectangles]:
if rectangle.w * rectangle.h > threshold:
n_out += 1
return n_out
def main_rectangles_fast():
cdef int n_rectangles = 10000000
cdef float threshold = 0.25
cdef Pool mem = Pool()
cdef Rectangle* rectangles = <Rectangle*>mem.alloc(n_rectangles, sizeof(Rectangle))
for i in range(n_rectangles):
rectangles[i].w = random()
rectangles[i].h = random()
n_out = check_rectangles_cy(rectangles, n_rectangles, threshold)
    print(n_out)
%%time
main_rectangles_fast()
%%cython -a
from cymem.cymem cimport Pool
from random import random
cdef struct Rectangle:
float w
float h
cdef int check_rectangles_cy(Rectangle* rectangles, int n_rectangles, float threshold):
cdef int n_out = 0
# C arrays contain no size information => we need to state it explicitly
for rectangle in rectangles[:n_rectangles]:
if rectangle.w * rectangle.h > threshold:
n_out += 1
return n_out
cpdef main_rectangles_fast():
cdef int n_rectangles = 10000000
cdef float threshold = 0.25
cdef Pool mem = Pool()
cdef Rectangle* rectangles = <Rectangle*>mem.alloc(n_rectangles, sizeof(Rectangle))
for i in range(n_rectangles):
rectangles[i].w = random()
rectangles[i].h = random()
n_out = check_rectangles_cy(rectangles, n_rectangles, threshold)
    print(n_out)
%%cython -a
from cymem.cymem cimport Pool
from random import random
cdef struct Rectangle:
float w
float h
cdef int check_rectangles_cy(Rectangle* rectangles, int n_rectangles, float threshold):
# ========== MODIFICATION ===========
# We changed the following line from `cdef int n_out = 0` to
n_out = 0
# n_out is not defined as an `int` anymore and is now thus a regular Python object
# ===================================
for rectangle in rectangles[:n_rectangles]:
if rectangle.w * rectangle.h > threshold:
n_out += 1
return n_out
cpdef main_rectangles_not_so_fast():
cdef int n_rectangles = 10000000
cdef float threshold = 0.25
cdef Pool mem = Pool()
cdef Rectangle* rectangles = <Rectangle*>mem.alloc(n_rectangles, sizeof(Rectangle))
for i in range(n_rectangles):
rectangles[i].w = random()
rectangles[i].h = random()
n_out = check_rectangles_cy(rectangles, n_rectangles, threshold)
    print(n_out)
import urllib.request
import spacy
# Build a dataset of 10 parsed documents extracted from the Wikitext-2 dataset
with urllib.request.urlopen('https://raw.githubusercontent.com/pytorch/examples/master/word_language_model/data/wikitext-2/valid.txt') as response:
text = response.read()
nlp = spacy.load('en')
doc_list = list(nlp(text[:800000].decode('utf8')) for i in range(10))
sum(len(doc) for doc in doc_list)
def slow_loop(doc_list, word, tag):
n_out = 0
for doc in doc_list:
for tok in doc:
if tok.lower_ == word and tok.tag_ == tag:
n_out += 1
return n_out
def main_nlp_slow(doc_list):
n_out = slow_loop(doc_list, 'run', 'NN')
    print(n_out)
%%time
# But it's also quite slow
main_nlp_slow(doc_list)
%%cython -+
import numpy # Sometimes we get a "failed to import numpy" compilation error if we don't import numpy first
from cymem.cymem cimport Pool
from spacy.tokens.doc cimport Doc
from spacy.typedefs cimport hash_t
from spacy.structs cimport TokenC
cdef struct DocElement:
TokenC* c
int length
cdef int fast_loop(DocElement* docs, int n_docs, hash_t word, hash_t tag):
cdef int n_out = 0
for doc in docs[:n_docs]:
for c in doc.c[:doc.length]:
if c.lex.lower == word and c.tag == tag:
n_out += 1
return n_out
cpdef main_nlp_fast(doc_list):
cdef int i, n_out, n_docs = len(doc_list)
cdef Pool mem = Pool()
cdef DocElement* docs = <DocElement*>mem.alloc(n_docs, sizeof(DocElement))
cdef Doc doc
for i, doc in enumerate(doc_list): # Populate our database structure
docs[i].c = doc.c
docs[i].length = (<Doc>doc).length
word_hash = doc.vocab.strings.add('run')
tag_hash = doc.vocab.strings.add('NN')
n_out = fast_loop(docs, n_docs, word_hash, tag_hash)
    print(n_out)
%%time
main_nlp_fast(doc_list)
| 9 |
0 | hf_public_repos/audio-transformers-course/chapters/ko | hf_public_repos/audio-transformers-course/chapters/ko/chapter1/audio_data.mdx | # Introduction to audio data[[introduction-to-audio-data]]
By nature, a sound wave is a continuous signal, meaning it contains an infinite number of signal values in a given time.
This poses a problem for digital devices, which require finite arrays of values. To be processed, stored, and transmitted by digital devices, the continuous sound wave needs to be converted into a series of discrete values, known as a digital representation.
If you look at any audio dataset, you'll find digital files with sound excerpts, such as text narration or music. You may encounter different file formats such as `.wav` (Waveform Audio File), `.flac` (Free Lossless Audio Codec), and `.mp3` (MPEG-1 Audio Layer 3). These formats mainly differ in how they compress the digital representation of the audio signal.
Let's take a look at how we arrive at this representation from a continuous signal. The analog signal is first captured by a microphone, which converts the sound waves into an electrical signal. The electrical signal is then digitized by an Analog-to-Digital Converter to get the digital representation through sampling.
## Sampling and sampling rate[[sampling-and-sampling-rate]]
Sampling is the process of measuring the value of a continuous signal at fixed time steps. The sampled waveform is discrete, since it contains a finite number of signal values at uniform intervals.
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/Signal_Sampling.png" alt="Signal sampling illustration">
</div>
*Illustration from Wikipedia article: [Sampling (signal processing)](https://en.wikipedia.org/wiki/Sampling_(signal_processing))*
The **sampling rate** (also called sampling frequency) is the number of samples taken in one second and is measured in hertz (Hz). For example, CD-quality audio has a sampling rate of 44,100 Hz, meaning samples are taken 44,100 times per second. By comparison, high-resolution audio has a sampling rate of 192,000 Hz, or 192 kHz. A common sampling rate used for training speech models is 16,000 Hz, or 16 kHz.
The choice of sampling rate primarily determines the highest frequency that can be captured from the signal. This upper limit is exactly half the sampling rate and is known as the Nyquist limit. For example, the audible frequencies in human speech are below 8 kHz, so sampling speech at 16 kHz is sufficient. Using a higher sampling rate will not capture any more information and merely increases the computational cost of processing such files. On the other hand, sampling at too low a rate results in information loss: speech sampled at 8 kHz will sound muffled, as the higher frequencies cannot be captured.
When working on an audio task, it's important to make sure that all audio examples in your dataset have the same sampling rate.
If you plan to fine-tune a pretrained model with your custom data, the sampling rate of your data should match the sampling rate of the data the model was pretrained on. This is because the sampling rate determines the time interval between successive samples, which affects the temporal resolution of the audio data. For example, a 5-second sound at a sampling rate of 16,000 Hz is represented as a series of 80,000 values, while the same 5-second sound at a sampling rate of 8,000 Hz is represented as a series of 40,000 values.
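This is simple arithmetic, shown here only to make the point concrete:
```py
duration = 5.0  # seconds
print(int(duration * 16000))  # 80000 values at 16 kHz
print(int(duration * 8000))   # 40000 values at 8 kHz
```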
Transformer models that solve audio tasks treat examples as sequences and rely on attention mechanisms to learn audio or multimodal representations. Since the same sound event yields different sequences at different sampling rates, it is hard for models to generalize across sampling rates.
**Resampling** is the process of making sampling rates match, and is part of [preprocessing](preprocessing#resampling-the-audio-data) audio data.
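As a minimal sketch of resampling with the `librosa` library (introduced later in this section), where the sine wave and the rates are purely illustrative:
```py
import numpy as np
import librosa

# one second of a 1 kHz sine wave sampled at 44,100 Hz
orig_sr = 44100
t = np.arange(orig_sr) / orig_sr
audio = np.sin(2 * np.pi * 1000 * t).astype(np.float32)

# resample down to the 16 kHz commonly used for speech models
target_sr = 16000
resampled = librosa.resample(audio, orig_sr=orig_sr, target_sr=target_sr)

print(len(audio), len(resampled))  # 44100 16000: the same second, fewer samples
```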
## Amplitude and bit depth[[amplitude-and-bit-depth]]
While the sampling rate tells you how often samples are taken, what exactly do the values in each sample represent?
Sound is made by changes in air pressure at frequencies audible to humans. The **amplitude** of a sound describes the sound pressure level at any given instant and is measured in decibels (dB). We perceive the amplitude as loudness.
To give an example, a normal speaking voice is under 60 dB, while a rock concert can be around 125 dB, pushing the limits of human hearing.
In digital audio, each audio sample records the amplitude of the audio wave at a point in time. The **bit depth** of the sample determines with how much precision this amplitude value is recorded. The higher the bit depth, the more faithfully the digital representation approximates the original continuous sound wave.
The most common audio bit depths are 16-bit and 24-bit. Each is a binary term, representing the number of possible steps to which the amplitude value can be quantized when it's converted from continuous to discrete: 65,536 steps for 16-bit audio, and a whopping 16,777,216 steps for 24-bit audio. Because quantizing involves rounding the continuous value off to a discrete value, the sampling process introduces noise. The higher the bit depth, the smaller this quantization noise.
In practice, the quantization noise of 16-bit audio is already small enough to be inaudible, so using higher bit depths is generally unnecessary.
You may also come across 32-bit audio. This stores the samples as floating-point values, whereas 16-bit and 24-bit audio use integer samples. The precision of a 32-bit floating-point value is 24 bits, giving it the same bit depth as 24-bit audio.
Floating-point audio samples are expected to lie within the [-1.0, 1.0] range. Since machine learning models naturally work on floating-point data, the audio must first be converted into floating-point format before it can be used to train a model. We'll see how to do this in the next section on [preprocessing](preprocessing).
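As a rough sketch of that conversion for 16-bit audio (illustrative only; in practice the loading library usually handles this for you):
```py
import numpy as np

# 16-bit samples are integers in the range [-32768, 32767]
int16_samples = np.array([0, 16384, -32768], dtype=np.int16)

# dividing by 2**15 = 32768 maps them into the [-1.0, 1.0] float range
float_samples = int16_samples.astype(np.float32) / 32768.0
print(float_samples)  # [ 0.   0.5 -1. ]
```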
Just as with continuous audio signals, the amplitude of digital audio is typically expressed in decibels (dB). Since human hearing is logarithmic in nature (our ears are more sensitive to small fluctuations in quiet sounds than in loud sounds), the loudness of a sound is easier to interpret when the amplitudes are in decibels, which are also logarithmic.
The decibel scale for real-world audio starts at 0 dB, which represents the quietest sound humans can hear, and louder sounds have larger values. For digital audio signals, however, 0 dB is the loudest possible amplitude, and all other amplitudes are negative (real-world levels are measured in dB SPL, while digital levels use dBFS, so the two scales differ). As a quick rule of thumb: every -6 dB is a halving of the amplitude, and anything below -60 dB is generally inaudible unless you really crank up the volume.
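We can verify that rule of thumb directly: the dBFS level of an amplitude `a` relative to full scale (1.0) is `20 * log10(a)`. A quick illustrative calculation:
```py
import numpy as np

for amplitude in [1.0, 0.5, 0.25, 0.001]:
    print(f"{amplitude:>6}: {20 * np.log10(amplitude):8.2f} dBFS")
# full scale is 0 dBFS, each halving subtracts about 6.02 dB,
# and an amplitude of 0.001 already sits at the -60 dB threshold
```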
## Audio as a waveform[[audio-as-a-waveform]]
You may have seen sounds visualized as a **waveform**, which plots the sample values over time and illustrates the changes in the sound's amplitude. This is also known as the *time domain* representation of sound.
This type of visualization is useful for identifying specific features of the audio signal such as the timing of individual sound events, the overall loudness of the signal, and any irregularities or noise present in the audio.
To plot the waveform of an audio signal, we can use the Python library `librosa`:
```bash
pip install librosa
```
Let's take an example sound of a trumpet that comes with the library:
```py
import librosa
array, sampling_rate = librosa.load(librosa.ex("trumpet"))
```
The example loads a tuple of the audio time series (`array`) and the sampling rate (`sampling_rate`).
Let's take a look at this sound's waveform with librosa's `waveshow()` function:
```py
import matplotlib.pyplot as plt
import librosa.display
plt.figure().set_figwidth(12)
librosa.display.waveshow(array, sr=sampling_rate)
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/waveform_plot.png" alt="Waveform plot">
</div>
This plot shows time along the x-axis and amplitude on the y-axis. Each point corresponds to a single sample value that was taken when this sound was sampled. Also note that librosa has already converted the audio to floating-point values, and that the amplitudes are indeed within the [-1.0, 1.0] range.
Visualizing the audio, along with listening to it, can be a useful tool for understanding the data you are working with.
You can see the shape of the signal, observe patterns, and learn to spot noise or distortion. If you preprocess the data in some way, such as normalization, resampling, or filtering, you can visually confirm that the preprocessing steps have been applied as expected.
After training a model, you can also visualize samples where errors occur to debug the issue (e.g. in an audio classification task).
## The frequency spectrum[[the-frequency-spectrum]]
Another way to visualize audio data is to plot the **frequency spectrum** of an audio signal, also known as the *frequency domain* representation.
The spectrum is computed using the discrete Fourier transform (DFT). It describes the individual frequencies that make up the signal and how strong each of them is.
You can compute the DFT with `numpy`'s `rfft()` function. Let's apply it to the trumpet sound from before and plot its frequency spectrum. While it's possible to plot the spectrum of the entire sound, it's more useful to focus on a small region instead. Here we'll take the DFT over the first 4096 samples, which roughly corresponds to the length of the first note being played:
```py
import numpy as np
dft_input = array[:4096]
# calculate the DFT
window = np.hanning(len(dft_input))
windowed_input = dft_input * window
dft = np.fft.rfft(windowed_input)
# get the amplitude spectrum in decibels
amplitude = np.abs(dft)
amplitude_db = librosa.amplitude_to_db(amplitude, ref=np.max)
# get the frequency bins
frequency = librosa.fft_frequencies(sr=sampling_rate, n_fft=len(dft_input))
plt.figure().set_figwidth(12)
plt.plot(frequency, amplitude_db)
plt.xlabel("Frequency (Hz)")
plt.ylabel("Amplitude (dB)")
plt.xscale("log")
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/spectrum_plot.png" alt="Spectrum plot">
</div>
This plot shows the strength of the various frequency components present in this audio segment. The frequency values are on the x-axis, usually on a logarithmic scale, while the amplitude is on the y-axis.
The frequency spectrum shows several peaks. These peaks correspond to the harmonics of the note being played, with the higher harmonics being quieter. Since the first peak is at around 620 Hz, this is the frequency spectrum of an E♭ note.
The output of the DFT is an array of complex numbers. Taking the magnitude with `np.abs(dft)` extracts the amplitude information from the spectrum. The angle between the real and imaginary components gives the so-called phase spectrum, but this is often discarded in machine learning.
`librosa.amplitude_to_db()` converts the amplitude values to the decibel scale, making the finer details of the spectrum easier to see. Sometimes people use the **power spectrum** instead, which measures energy rather than amplitude; this is simply a spectrum with the amplitude values squared.
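If you do want the power spectrum, square the amplitudes and use the matching decibel helper. Continuing from the `amplitude` array computed above:

```py
power = amplitude**2  # energy rather than amplitude
power_db = librosa.power_to_db(power, ref=np.max)
```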
<Tip>
💡 In practice, you'll often hear people use the term FFT (fast Fourier transform) interchangeably with DFT. This is because the FFT is the only efficient way of computing the DFT.
</Tip>
The waveform and the frequency spectrum of an audio signal contain the same information; they are simply two different ways of looking at the same data (here, the first 4096 samples of the trumpet sound). Where the waveform plots the amplitude of the audio signal over time, the spectrum visualizes the amplitudes of the individual frequencies at a fixed point in time.
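Because the two views carry the same information, the DFT is invertible. Continuing from the `dft` and `windowed_input` arrays computed above, a round trip through `np.fft.irfft()` recovers the windowed excerpt up to floating-point error:

```py
reconstructed = np.fft.irfft(dft, n=len(windowed_input))
print(np.allclose(reconstructed, windowed_input))  # True
```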
## Spectrogram[[spectrogram]]
What if we want to see how the frequencies in an audio signal change over time? The trumpet plays several notes, each made up of different frequencies. The problem with the spectrum is that it only shows the frequencies at a single frozen instant.
The solution is to take multiple DFTs, each covering only a small slice of time, and stack the resulting spectra together into a **spectrogram**.
A spectrogram plots the frequency content of an audio signal as it changes over time, letting you see time, frequency, and amplitude all on one graph. The algorithm that performs this computation is the short-time Fourier transform (STFT).
The spectrogram is one of the most useful tools for working with audio. For example, in a music recording you can see how the various instruments and vocal tracks contribute to the overall sound. In speech, you can identify different vowel sounds, since each vowel is characterized by its own particular frequencies.
Let's plot a spectrogram for the trumpet sound, using `librosa`'s `stft()` and `specshow()` functions:
```py
import numpy as np
D = librosa.stft(array)
S_db = librosa.amplitude_to_db(np.abs(D), ref=np.max)
plt.figure().set_figwidth(12)
librosa.display.specshow(S_db, x_axis="time", y_axis="hz")
plt.colorbar()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/spectrogram_plot.png" alt="Spectrogram plot">
</div>
In this plot, the x-axis represents time as in the waveform plot, but the y-axis now represents frequency in Hz.
The intensity of the color gives the amplitude or power of the frequency component at each point in time, measured in decibels (dB).
The spectrogram is created by taking short segments of the audio signal, typically lasting a few milliseconds, and computing the DFT of each segment to obtain its frequency spectrum; stacking these spectra along the time axis yields the spectrogram.
Each vertical slice in this image corresponds to a single frequency spectrum (like the one we saw above). By default, `librosa.stft()` splits the audio signal into segments of 2048 samples, which gives a good trade-off between frequency resolution and time resolution.
Since the spectrogram and the waveform are different views of the same data, it's possible to turn the spectrogram back into the original waveform using the inverse STFT. However, this requires the phase information in addition to the amplitude information; if the spectrogram was generated by a machine learning model, it typically only outputs the amplitudes.
In that case, we can use a phase reconstruction algorithm, such as the classic Griffin-Lim algorithm or a neural network called a vocoder, to reconstruct a waveform from the spectrogram.
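As a rough illustration, `librosa` ships an implementation of Griffin-Lim. The sketch below deliberately drops the phase from the STFT `D` computed above and reconstructs an audible, if imperfect, waveform from the magnitudes alone:

```py
magnitude = np.abs(D)  # keep only the amplitudes, discarding the phase
reconstructed = librosa.griffinlim(magnitude)  # iteratively estimates the missing phase
```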
Spectrograms aren't used just for visualization, either. Many machine learning models take spectrograms as input (rather than waveforms), and some produce spectrograms as output.
Now that we know what a spectrogram is and how it's made, let's look at a variant of it widely used for speech processing: the mel spectrogram.
## Mel spectrogram[[mel-spectrogram]]
The mel spectrogram is a variant of the spectrogram that is commonly used in speech processing and machine learning tasks.
It is similar to a spectrogram in that it shows the frequency content of an audio signal over time, but it uses a different frequency axis.
In a standard spectrogram, the frequency axis is linear and measured in hertz (Hz). However, the human auditory system is more sensitive to changes in lower frequencies than in higher ones, and this sensitivity decreases logarithmically as frequency increases. The mel scale is a perceptual scale that approximates this non-linear frequency response of the human ear.
To create a mel spectrogram, the STFT is used just as before, splitting the audio into short segments to obtain a sequence of frequency spectra. Additionally, each spectrum is passed through a set of filters known as a mel filterbank, which transforms the frequencies onto the mel scale.
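You can inspect such a filterbank directly. This sketch builds one matching the default STFT size used above; each row holds the weights of one mel filter over the linear frequency bins:

```py
mel_filters = librosa.filters.mel(sr=sampling_rate, n_fft=2048, n_mels=80)
print(mel_filters.shape)  # (80, 1025): 80 mel bands over 1 + 2048 // 2 frequency bins
```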
Let's use `librosa`'s `melspectrogram()` function, which performs all of these steps for us, to plot a mel spectrogram:
```py
S = librosa.feature.melspectrogram(y=array, sr=sampling_rate, n_mels=128, fmax=8000)
S_dB = librosa.power_to_db(S, ref=np.max)
plt.figure().set_figwidth(12)
librosa.display.specshow(S_dB, x_axis="time", y_axis="mel", sr=sampling_rate, fmax=8000)
plt.colorbar()
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/mel-spectrogram.png" alt="Mel spectrogram plot">
</div>
In the example above, `n_mels` sets the number of mel bands to generate. The mel bands define a set of frequency ranges that divide the spectrum into perceptually meaningful components, using filters whose shape and spacing are chosen to mimic how the human ear responds to different frequencies. Common choices for `n_mels` are 40 or 80. `fmax` indicates the highest frequency (in Hz) we care about.
Just as with a regular spectrogram, it's common practice to express the strength of the mel frequency components in decibels. This is commonly referred to as a **log-mel spectrogram**, because the conversion to decibels involves a logarithmic operation. The example above used `librosa.power_to_db()` because `librosa.feature.melspectrogram()` creates a power spectrogram.
<Tip>
💡 Not all mel spectrograms are the same! There are two commonly used mel scales ("htk" and "slaney"), and instead of the power spectrogram the amplitude spectrogram may be used. The conversion to a log-mel spectrogram doesn't always compute true decibels and may simply take the log. Therefore, if a machine learning model expects a mel spectrogram as input, double check that you're computing it in the same way.
</Tip>
Creating a mel spectrogram is a lossy operation, since it involves filtering the signal. Converting a mel spectrogram back into a waveform is therefore harder than doing the same for a regular spectrogram, because the discarded frequencies have to be estimated somehow. This is why machine learning models such as the HiFiGAN vocoder are needed to turn a mel spectrogram back into a waveform.
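For quick experiments, `librosa` does offer a Griffin-Lim based approximation. The result sounds noticeably worse than a neural vocoder, which is exactly the point of the comparison. This sketch assumes `S`, the mel power spectrogram computed above:

```py
# approximate inversion; fmax must match the value used when building S
reconstructed = librosa.feature.inverse.mel_to_audio(S, sr=sampling_rate, fmax=8000)
```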
Compared to a standard spectrogram, a mel spectrogram captures characteristics of the audio signal that are more meaningful for human perception, which makes it a popular choice for tasks like speech recognition, speaker identification, and music genre classification.
Now that you know how to visualize audio data, go ahead and see what your favorite sounds look like. :)
| 0 |
0 | hf_public_repos/audio-transformers-course/chapters/ko | hf_public_repos/audio-transformers-course/chapters/ko/chapter1/load_and_explore.mdx | # Load and explore an audio dataset[[load-and-explore-an-audio-dataset]]
In this course we will use the 🤗 Datasets library to work with audio datasets. 🤗 Datasets is an open-source library for downloading and preparing datasets of all modalities, including audio.
The library offers easy access to the machine learning datasets publicly available on the Hugging Face Hub. Moreover, 🤗 Datasets includes several features tailored to audio datasets that make them easier to work with for researchers and practitioners alike.
Before we begin, make sure you have the 🤗 Datasets library installed:
```bash
pip install datasets[audio]
```
One of the key features of 🤗 Datasets is the ability to download and prepare a dataset in a single line of Python code using the `load_dataset()` function.
Let's load and explore an audio dataset called [MINDS-14](https://huggingface.co/datasets/PolyAI/minds14), which contains recordings of people asking questions about internet banking in several languages and dialects.
To load the MINDS-14 dataset, we need to pass its identifier on the Hub (`PolyAI/minds14`) to the `load_dataset()` function. We'll also specify that we're only interested in the Australian subset (`en-AU`) of the data, and limit it to the training split:
```py
from datasets import load_dataset
minds = load_dataset("PolyAI/minds14", name="en-AU", split="train")
minds
```
**Output:**
```out
Dataset(
{
features: [
"path",
"audio",
"transcription",
"english_transcription",
"intent_class",
"lang_id",
],
num_rows: 654,
}
)
```
The dataset contains 654 audio files, each accompanied by a transcription, an English translation, and a label indicating the intent behind the speaker's query. The audio column contains the raw audio data. Let's take a closer look at one of the examples:
```py
example = minds[0]
example
```
**Output:**
```out
{
"path": "/root/.cache/huggingface/datasets/downloads/extracted/f14948e0e84be638dd7943ac36518a4cf3324e8b7aa331c5ab11541518e9368c/en-AU~PAY_BILL/response_4.wav",
"audio": {
"path": "/root/.cache/huggingface/datasets/downloads/extracted/f14948e0e84be638dd7943ac36518a4cf3324e8b7aa331c5ab11541518e9368c/en-AU~PAY_BILL/response_4.wav",
"array": array(
[0.0, 0.00024414, -0.00024414, ..., -0.00024414, 0.00024414, 0.0012207],
dtype=float32,
),
"sampling_rate": 8000,
},
"transcription": "I would like to pay my electricity bill using my card can you please assist",
"english_transcription": "I would like to pay my electricity bill using my card can you please assist",
"intent_class": 13,
"lang_id": 2,
}
```
You may notice the audio column contains several features. Here's what they are:
* `path`: the path to the audio file (`*.wav` in this example).
* `array`: the decoded audio data, represented as a 1-dimensional NumPy array.
* `sampling_rate`: the sampling rate of the audio file (8,000 Hz in this example; see the quick check below).
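A quick way to confirm that the examples share a single sampling rate is to peek at a handful of them (an illustrative sketch, checking just the first ten):

```py
rates = {minds[i]["audio"]["sampling_rate"] for i in range(10)}
print(rates)  # expected: {8000}
```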
The `intent_class` indicates which category the recording is classified into. To convert this number into a meaningful string, use the `int2str()` method:
```py
id2label = minds.features["intent_class"].int2str
id2label(example["intent_class"])
```
**Output:**
```out
"pay_bill"
```
If you look at the `transcription` feature, you can see that the audio file has indeed recorded a person asking a question about paying a bill.
If you plan to train an audio classifier on this subset of data, you may not need all of these features. For example, `lang_id` has the same value for every example, so it won't be very useful. `english_transcription` will mostly duplicate `transcription` in this Australian subset, so it can safely be removed.
You can easily remove such irrelevant features with 🤗 Datasets' `remove_columns` method:
```py
columns_to_remove = ["lang_id", "english_transcription"]
minds = minds.remove_columns(columns_to_remove)
minds
```
**Output:**
```out
Dataset({features: ["path", "audio", "transcription", "intent_class"], num_rows: 654})
```
Now that we've loaded and inspected the raw contents of the dataset, let's listen to a few examples!
We'll use the `Blocks` and `Audio` features from `Gradio` to decode a few random samples from the dataset:
```py
import gradio as gr
def generate_audio():
example = minds.shuffle()[0]
audio = example["audio"]
return (
audio["sampling_rate"],
audio["array"],
), id2label(example["intent_class"])
with gr.Blocks() as demo:
with gr.Column():
for _ in range(4):
audio, label = generate_audio()
output = gr.Audio(audio, label=label)
demo.launch(debug=True)
```
If you'd like to, you can also visualize some of the examples. Let's plot the waveform for the first example.
```py
import librosa
import matplotlib.pyplot as plt
import librosa.display
array = example["audio"]["array"]
sampling_rate = example["audio"]["sampling_rate"]
plt.figure().set_figwidth(12)
librosa.display.waveshow(array, sr=sampling_rate)
```
<div class="flex justify-center">
<img src="https://huggingface.co/datasets/huggingface-course/audio-course-images/resolve/main/waveform_unit1.png" alt="Waveform plot">
</div>
Try it out! Download another language or dialect of the MINDS-14 dataset, listen to some examples, and visualize them to get a sense of the variation across the whole dataset. You can find the full list of available languages [here](https://huggingface.co/datasets/PolyAI/minds14).
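For example, loading the French subset only requires changing the config name (assuming the `fr-FR` config listed on the dataset page):

```py
minds_fr = load_dataset("PolyAI/minds14", name="fr-FR", split="train")
```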
| 1 |
0 | hf_public_repos/audio-transformers-course/chapters/unpublished | hf_public_repos/audio-transformers-course/chapters/unpublished/chapter9/audioldm.mdx | # AudioLDM | 2 |
0 | hf_public_repos/audio-transformers-course/chapters/unpublished | hf_public_repos/audio-transformers-course/chapters/unpublished/chapter9/riffusion.mdx | # Riffusion | 3 |
0 | hf_public_repos/audio-transformers-course/chapters/unpublished | hf_public_repos/audio-transformers-course/chapters/unpublished/chapter9/dance_diffusion.mdx | # Dance diffusion | 4 |
0 | hf_public_repos/audio-transformers-course/chapters/unpublished | hf_public_repos/audio-transformers-course/chapters/unpublished/chapter9/music_generation.mdx | # Music generation | 5 |
0 | hf_public_repos/audio-transformers-course/chapters/unpublished | hf_public_repos/audio-transformers-course/chapters/unpublished/chapter9/introduction.mdx | # TODO
| 6 |
0 | hf_public_repos/audio-transformers-course/chapters | hf_public_repos/audio-transformers-course/chapters/ru/_toctree.yml | - title: Unit 0. Welcome to the course!
  sections:
  - local: chapter0/introduction
    title: What to expect from the course
  - local: chapter0/get_ready
    title: Get ready
  - local: chapter0/community
    title: Join the community
- title: Unit 1. Working with audio data
  sections:
  - local: chapter1/introduction
    title: What you'll learn
  - local: chapter1/audio_data
    title: Introduction to audio data
  - local: chapter1/load_and_explore
    title: Load and explore an audio dataset
  - local: chapter1/preprocessing
    title: Preprocessing audio data
  - local: chapter1/streaming
    title: Streaming audio data
  - local: chapter1/quiz
    title: Quiz
    quiz: 1
  - local: chapter1/supplemental_reading
    title: Supplemental reading and resources
- title: Unit 2. A gentle introduction to audio applications
  sections:
  - local: chapter2/introduction
    title: Tour of audio applications
  - local: chapter2/audio_classification_pipeline
    title: Audio classification with a pipeline
  - local: chapter2/asr_pipeline
    title: Automatic speech recognition with a pipeline
  - local: chapter2/hands_on
    title: Hands-on exercise
- title: Unit 3. Transformer architectures for audio
  sections:
  - local: chapter3/introduction
    title: Refresher on transformer models
  - local: chapter3/ctc
    title: CTC architectures
  - local: chapter3/seq2seq
    title: Seq2Seq architectures
  - local: chapter3/classification
    title: Audio classification architectures
  - local: chapter3/quiz
    title: Quiz
    quiz: 3
  - local: chapter3/supplemental_reading
    title: Supplemental reading and resources
- title: Unit 4. Build a music genre classifier
  sections:
  - local: chapter4/introduction
    title: What you'll learn and what you'll build
  - local: chapter4/classification_models
    title: Pre-trained models for audio classification
  - local: chapter4/fine-tuning
    title: Fine-tuning a model for music classification
  - local: chapter4/demo
    title: Build a demo with Gradio
  - local: chapter4/hands_on
    title: Hands-on exercise
- title: Unit 5. Automatic Speech Recognition
  sections:
  - local: chapter5/introduction
    title: What you'll learn and what you'll build
  - local: chapter5/asr_models
    title: Pre-trained models for speech recognition
  - local: chapter5/choosing_dataset
    title: Choosing a dataset
  - local: chapter5/evaluation
    title: Evaluation and metrics for speech recognition
  - local: chapter5/fine-tuning
    title: How to fine-tune an ASR system with the Trainer API
  - local: chapter5/demo
    title: Building a demo
  - local: chapter5/hands_on
    title: Hands-on exercise
  - local: chapter5/supplemental_reading
    title: Supplemental reading and resources
- title: Unit 6. From text to speech
  sections:
  - local: chapter6/introduction
    title: What you'll learn and what you'll build
  - local: chapter6/tts_datasets
    title: Text-to-speech datasets
  - local: chapter6/pre-trained_models
    title: Pre-trained models for text-to-speech
  - local: chapter6/fine-tuning
    title: Fine-tuning SpeechT5
  - local: chapter6/evaluation
    title: Evaluating text-to-speech models
  - local: chapter6/hands_on
    title: Hands-on exercise
  - local: chapter6/supplemental_reading
    title: Supplemental reading and resources
- title: Unit 7. Putting it all together
  sections:
  - local: chapter7/introduction
    title: What you'll learn and what you'll build
  - local: chapter7/speech-to-speech
    title: Speech-to-speech translation
  - local: chapter7/voice-assistant
    title: Creating a voice assistant
  - local: chapter7/transcribe-meeting
    title: Transcribe a meeting
  - local: chapter7/hands-on
    title: Hands-on exercise
  - local: chapter7/supplemental_reading
    title: Supplemental reading and resources
- title: Unit 8. Finish line
  sections:
  - local: chapter8/introduction
    title: Congratulations!
  - local: chapter8/certification
    title: Get your certificate of completion
- title: Course events
  sections:
  - local: events/introduction
    title: Live sessions and workshops
0 | hf_public_repos/audio-transformers-course/chapters | hf_public_repos/audio-transformers-course/chapters/ru/translation_agreements.txt | English term	Russian term
unit, section раздел
chapter глава
Quiz Контрольный опрос
Supplemental reading Дополнительные материалы
state-of-the-art передовых
demo демонстрация (демо)
transcribe транскрибирует
NLP* NLP
Sanchit Gandhi Санчит Ганди
Matthijs Hollemans Маттиджс Холлеманс
Maria Khalusova Мария Халусова
Vaibhav Srivastav Вайбхав Шривастав
Developer Advocate Специалист по продвижению интересов разработчиков (как наиболее близкое по смыслу)
Sign up зарегистрироваться
Sampling Выборка (сэмплирование, дискретизация) ВАЖНО! В описании процесса дискретизации звука это дискретизация
sampling rate частота семплировани/частота дискретизации (в описании процесса преобразования аналогового звука в цифровой)
subsampling сабсэмплинг
fine-tune дообучения (аналогично и все производные формы)
Resampling передискретизация
preprocessing препроцессинг (предварительная обработка)
bit depth битовая глубина
audio sample** пример/образец/выборка аудио
waveform Форма волны (https://ru.wikipedia.org/wiki/%D0%A4%D0%BE%D1%80%D0%BC%D0%B0_%D0%B2%D0%BE%D0%BB%D0%BD%D1%8B)
Time domain Временная область (https://ru.wikipedia.org/wiki/%D0%92%D1%80%D0%B5%D0%BC%D0%B5%D0%BD%D0%BD%D0%B0%D1%8F_%D0%BE%D0%B1%D0%BB%D0%B0%D1%81%D1%82%D1%8C)
time series временной ряд
frequency spectrum Частотный спектр
Frequency domain Частотный интервал (https://ru.wikipedia.org/wiki/%D0%A7%D0%B0%D1%81%D1%82%D0%BE%D1%82%D0%BD%D1%8B%D0%B9_%D0%B8%D0%BD%D1%82%D0%B5%D1%80%D0%B2%D0%B0%D0%BB)
DFT (Discrete Fourier Transform)* ДПФ (Дискретное преобразование Фурье)
FFT (Fast Fourier Transform)* БПФ (Быстрое преобразование Фурье)
finer details** тонкие детали (моменты)
STFT (Short Time Fourier Transform)* ОПФ (Оконное преобразование Фурье)
ISTFT (Invers Short Time Fourier Transform) ООПФ (Обратное Оконное преобразование Фурье)
Mel spectrogram*** Мэл спектрограмма
mel filterbank*** банк фильтров мэла
mel bands*** мэл диапазонов
log-mel spectrogram*** лог-мэл-спектрограммой
power spectrogram спектрограмма мощности
dataset набор данных (датасет)
dataset split часть набора данных (сплит), например часть набора данных для валидации/обучения, тестовая часть набора данных.
raw audio data сырые (небоработанные) аудио данные
features признак (параметр)
inference**** инференс
in-place** непосредственно/на месте
aliases! наложение (ТОЛЬКО В СЛУЧАЕ ЕСЛИ РЕЧЬ В КОНТЕКСТЕ ИДЁТ О ТЕОРЕМЕ НАЙКВИСТА!) https://ru.wikipedia.org/wiki/%D0%90%D0%BB%D0%B8%D0%B0%D1%81%D0%B8%D0%BD%D0%B3
batch батч/пакет
pads дополняет/выравнивает
Streaming Потоковая передача
Monty Montgomery Монти Монтгомери
dither дизеринг https://ru.wikipedia.org/wiki/%D0%94%D0%B8%D0%B7%D0%B5%D1%80%D0%B8%D0%BD%D0%B3
Brian McFee Брайан Макфи
principal maintainer главный сопровождающий
evaluate оценивать
gentle introduction краткое введение
Tour Обзор (пример Tour of audio applications - Обзор аудио приложений)
Refresher on Освежим в памяти.
embedding vector вектор эмбедингов (сжатое векторное представление).
encoder энкодер (кодер)
decoder декодер
convolutional layers свёрточных слоёв
Model outputs Выходы модели
padding token токен-заполнитель (так как используется для дополнения предложения - padding)
alignment выравнивание
beam search лучевой поиск (https://en.wikipedia.org/wiki/Beam_search)
Mean opinion score Средняя экспертная оценка
image patche фрагмент изображения
Gaussian mixture model Смешанная модель Гаусса
dropout исключение (обязательно с указанием англ. формы - dropout) https://ru.wikipedia.org/wiki/%D0%98%D1%81%D0%BA%D0%BB%D1%8E%D1%87%D0%B5%D0%BD%D0%B8%D0%B5_(%D0%BD%D0%B5%D0%B9%D1%80%D0%BE%D0%BD%D0%BD%D1%8B%D0%B5_%D1%81%D0%B5%D1%82%D0%B8)
wake word "слово пробуждения" (слово, благодаря анличию которого голосовой ассистент понимает что обращаются к нему)
chunk фрагмент (напр. фрагмент аудиообразца)
chunking фрагментирование (процесс дробления чего-либо на множество мелких разрозненных фрагментов)
Notes:
* — an acronym or abbreviation; left untranslated, or replaced with the accepted Russian equivalent. You can look up the English term on Wikipedia and then switch to the Russian-language Wikipedia to obtain the commonly accepted translation of the term.
** — pick the right translation variant based on context
*** — no established equivalent found; translated based on the form most commonly used on the Russian-language internet.
**** — treated as a neologism, since the translation "вывод" is not quite correct. The reason is simple: there is also "вывод модели" — the data produced when the model processes its inputs. Translating "inference" as "вывод" would create confusion.
! — a tricky case of context-dependent translation; THE CONTEXT IN WHICH THE TERM IS USED IS VERY IMPORTANT!
| 8 |
0 | hf_public_repos/audio-transformers-course/chapters/ru | hf_public_repos/audio-transformers-course/chapters/ru/chapter0/get_ready.mdx | # Get ready to take the course
We hope that you can't wait to start the course, and we have designed this page to make sure
you have everything you need to jump right in!
## Step 1. Sign up
To stay on top of all updates and special social events, sign up for the course.
[👉 SIGN UP](http://eepurl.com/insvcI)
## Step 2. Get a Hugging Face account
If you don't have one yet, create a Hugging Face account (it's free). You'll need it to complete the hands-on tasks, to
receive your certificate of completion, to explore pre-trained models, to access datasets, and more.
[👉 CREATE A HUGGING FACE ACCOUNT](https://huggingface.co/join)
## Step 3. Brush up on the fundamentals (if you need to)
We assume that you are familiar with deep learning basics and have a general understanding of transformers. If you need
to refresh your knowledge of transformers, check out our [NLP Course](https://huggingface.co/course/chapter1/1).
## Step 4. Check your setup
To go through the course materials you will need:
- A computer with an internet connection
- [Google Colab](https://colab.research.google.com) for the hands-on exercises. The free version is enough.
If you have never used Google Colab before, check out this [official introduction notebook](https://colab.research.google.com/notebooks/intro.ipynb).
## Step 5. Join the community
Sign up for our Discord server, the place where you can exchange ideas with your classmates and reach out to us (the Hugging Face team).
[👉 JOIN THE COMMUNITY ON DISCORD](http://hf.co/join/discord)
To learn more about our Discord community and how to make the most of it, check out the [next page](community).
| 9 |