Dataset columns: modelId (string, 4-81 chars), tags (list), pipeline_tag (string, 17 classes), config (dict), downloads (int64, 0-59.7M), first_commit (timestamp[ns, tz=UTC]), card (string, 51-438k chars)
Axon/resnet18-v1
[ "dataset:ImageNet", "arxiv:1512.03385", "Axon", "Elixir", "license:apache-2.0" ]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: creativeml-openrail-m --- ## Textual Inversion Embed + Hypernetwork For SD 2 models by ShadoWxShinigamI Trained on 200 BLIP-captioned images from my personal MJ generations. Meant to be used with 768 models. 16 Vectors - 625 Steps - TI Embed; Swish - 10000 Steps - Hypernetwork. The Hypernetwork is meant to be an augment used alongside the embed; using it at 0.5 strength tends to produce the best output (YMMV). Examples: ![image.png](https://s3.amazonaws.com/moonup/production/uploads/1670827476778-633a520aecbd8b19357b4806.png) ![00001-335098425.png](https://s3.amazonaws.com/moonup/production/uploads/1670828191063-633a520aecbd8b19357b4806.png) ![anime.png](https://s3.amazonaws.com/moonup/production/uploads/1670828241828-633a520aecbd8b19357b4806.png) ![monkey.png](https://s3.amazonaws.com/moonup/production/uploads/1670828303588-633a520aecbd8b19357b4806.png) ![panda.png](https://s3.amazonaws.com/moonup/production/uploads/1670828302002-633a520aecbd8b19357b4806.png)
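A minimal sketch, assuming a hypothetical repo id, embed filename, and trigger token (the card gives none of these), of loading a textual-inversion embed like this with `diffusers`; the hypernetwork half is normally applied in a web UI such as AUTOMATIC1111 rather than through `diffusers`:

```python
from diffusers import StableDiffusionPipeline
import torch

# Assumed: a Stable Diffusion 2.x 768 base model; the card only says "768 Models".
pipe = StableDiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-1", torch_dtype=torch.float16
).to("cuda")

# Hypothetical repo id and trigger token for the embed; check the actual repo files.
pipe.load_textual_inversion("ShadoWxShinigamI/sd2-mj-embed", token="mj-style")

image = pipe("a portrait in mj-style, highly detailed", height=768, width=768).images[0]
image.save("example.png")
```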
Axon/resnet50-v1
[ "dataset:ImageNet", "arxiv:1512.03385", "Axon", "Elixir", "license:apache-2.0" ]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
2022-12-12T06:51:06Z
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial on training your first agent with ML-Agents and publishing it to the Hub. ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser**: 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Write your model_id: gufte/ppo-Huggy 3. Select your *.nn / *.onnx file 4. Click on Watch the agent play 👀
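A minimal sketch of pulling the trained policy file from the Hub with `huggingface_hub` for local use; the filename `Huggy.onnx` is an assumption, since the card does not state the exported file name:

```python
from huggingface_hub import hf_hub_download

# Assumed filename; check the repo's file listing for the actual *.onnx / *.nn name.
model_path = hf_hub_download(repo_id="gufte/ppo-Huggy", filename="Huggy.onnx")
print(f"Downloaded trained Huggy policy to: {model_path}")
```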
Ayham/bert_gpt2_summarization_cnndm
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:cnn_dailymail", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
4
null
--- license: apache-2.0 tags: - whisper-event - generated_from_trainer datasets: - NbAiLab/NCC_S metrics: - wer model-index: - name: Whisper Base Norwegian results: - task: name: Automatic Speech Recognition type: automatic-speech-recognition dataset: name: NbAiLab/NCC_S type: NbAiLab/NCC_S config: 'no' split: validation args: 'no' metrics: - name: Wer type: wer value: 15.012180267965894 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Whisper Base Norwegian This model is a fine-tuned version of [pere/whisper-small-nob-clr](https://huggingface.co/pere/whisper-small-nob-clr) on the NbAiLab/NCC_S dataset. It achieves the following results on the evaluation set: - Loss: 0.3284 - Wer: 15.0122 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 64 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 2 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: constant_with_warmup - training_steps: 3000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:-------:| | 0.5975 | 0.33 | 1000 | 0.3354 | 15.7734 | | 0.5783 | 0.67 | 2000 | 0.3327 | 16.3520 | | 0.5788 | 1.0 | 3000 | 0.3284 | 15.0122 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
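The card itself includes no inference snippet; a minimal sketch using the `transformers` ASR pipeline, with a placeholder repo id since the card does not name the final repository for this Whisper Base Norwegian checkpoint:

```python
from transformers import pipeline

# Placeholder repo id; substitute the actual fine-tuned checkpoint.
asr = pipeline("automatic-speech-recognition", model="<org>/whisper-base-norwegian")

# Transcribe a local audio file (16 kHz mono works best for Whisper models).
result = asr("sample_norwegian.wav")
print(result["text"])
```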
Ayham/bert_gpt2_summarization_cnndm_new
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:cnn_dailymail", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- license: mit tags: - pytorch - diffusers - unconditional-image-generation - diffusion-models-class --- This model is a diffusion model for unconditional image generation of butterflies, trained on the ceyda Smithsonian butterflies dataset at a sample resolution of 64×64. ## Usage ```python from diffusers import DDPMPipeline pipeline = DDPMPipeline.from_pretrained('Apocalypse-19/ceyda-butterflies-64') image = pipeline().images[0] image ```
Ayham/bert_gpt2_summarization_xsum
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:xsum", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
6
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial on training your first agent with ML-Agents and publishing it to the Hub. ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser**: 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Write your model_id: Pech82/ppo-Huggy 3. Select your *.nn / *.onnx file 4. Click on Watch the agent play 👀
Ayham/bert_roberta_summarization_cnn_dailymail
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:cnn_dailymail", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
3
2022-12-12T07:46:35Z
--- license: creativeml-openrail-m tags: - text-to-image widget: - text: samik --- ### samik_test Dreambooth model trained by sokobanni with the [Hugging Face Dreambooth Training Space](https://huggingface.co/spaces/multimodalart/dreambooth-training) with the v2-1-768 base model. You can run your new concept via the `diffusers` [Colab Notebook for Inference](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_dreambooth_inference.ipynb). Don't forget to use the concept prompts! Sample pictures of: samik (use that in your prompt) ![samik 0](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%281%29.jpg)![samik 1](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%282%29.jpg)![samik 2](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%283%29.jpg)![samik 3](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%284%29.jpg)![samik 4](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%285%29.jpg)![samik 5](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%286%29.jpg)![samik 6](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%287%29.jpg)![samik 7](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%288%29.jpg)![samik 8](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%289%29.jpg)![samik 9](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%2810%29.jpg)![samik 10](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%2811%29.jpg)![samik 11](https://huggingface.co/sokobanni/samik-test/resolve/main/concept_images/samik_%2812%29.jpg)
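A minimal sketch, assuming the weights are hosted in `diffusers` format at `sokobanni/samik-test` (the repo the sample images above are served from), of running the concept locally instead of through the Colab notebook:

```python
from diffusers import StableDiffusionPipeline
import torch

# Repo id inferred from the sample-image URLs in this card; adjust if it differs.
pipe = StableDiffusionPipeline.from_pretrained(
    "sokobanni/samik-test", torch_dtype=torch.float16
).to("cuda")

# The concept token is "samik" per the card; v2-1-768 base, so generate at 768x768.
image = pipe("a portrait photo of samik, studio lighting", height=768, width=768).images[0]
image.save("samik.png")
```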
Ayham/roberta_distilgpt2_summarization_cnn_dailymail
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:cnn_dailymail", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
4
2022-12-13T12:01:56Z
--- license: mit tags: - generated_from_trainer datasets: - imagefolder model-index: - name: donut-base-sroie results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # donut-base-sroie This model is a fine-tuned version of [naver-clova-ix/donut-base](https://huggingface.co/naver-clova-ix/donut-base) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3.4707116138614145e-05 - train_batch_size: 2 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 30 - mixed_precision_training: Native AMP ### Training results ### Framework versions - Transformers 4.27.0.dev0 - Pytorch 1.13.1+cu116 - Datasets 2.8.0 - Tokenizers 0.13.2
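The card leaves usage blank; a minimal sketch of loading a Donut checkpoint like this with `transformers`, using the base model id named in the card as a stand-in since the fine-tuned repo's full id is not given:

```python
from transformers import DonutProcessor, VisionEncoderDecoderModel
from PIL import Image

# Stand-in id; replace with the fine-tuned "donut-base-sroie" repo when known.
repo_id = "naver-clova-ix/donut-base"
processor = DonutProcessor.from_pretrained(repo_id)
model = VisionEncoderDecoderModel.from_pretrained(repo_id)

# Encode a receipt image; generation would additionally need the task prompt
# the model was fine-tuned with, which this card does not state.
image = Image.open("receipt.png").convert("RGB")
pixel_values = processor(image, return_tensors="pt").pixel_values
print(pixel_values.shape)
```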
Ayham/roberta_gpt2_summarization_cnn_dailymail
[ "pytorch", "tensorboard", "encoder-decoder", "text2text-generation", "dataset:cnn_dailymail", "transformers", "generated_from_trainer", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
31
null
--- license: creativeml-openrail-m --- Stable Diffusion v1-5 with the fine-tuned VAE `sd-vae-ft-mse` and config modifications that make it easier to fine-tune, made by [fast-stable-diffusion by TheLastBen](https://github.com/TheLastBen/fast-stable-diffusion) to be used in the [fastDreambooth Colab Notebook](https://colab.research.google.com/github/TheLastBen/fast-stable-diffusion/blob/main/fast-DreamBooth.ipynb) and the [Dreambooth Training Space](https://huggingface.co/spaces/multimodalart/dreambooth-training). It is not suited for inference, and training elsewhere is at your own risk. The [model LICENSE](https://huggingface.co/spaces/CompVis/stable-diffusion-license) still applies normally for this use case. Refer to the [original repository](https://huggingface.co/runwayml/stable-diffusion-v1-5) for the model card.
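The card says this repo itself is only for the two training front-ends above, not for inference; purely to illustrate the combination it describes (SD v1-5 plus the fine-tuned MSE VAE), here is a hedged `diffusers` sketch using the public base repo linked in the card and an assumed VAE repo id:

```python
from diffusers import StableDiffusionPipeline, AutoencoderKL
import torch

# Assumed VAE repo id; the card only names the VAE as "sd-vae-ft-mse".
vae = AutoencoderKL.from_pretrained("stabilityai/sd-vae-ft-mse", torch_dtype=torch.float16)

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",  # original repository linked in the card
    vae=vae,
    torch_dtype=torch.float16,
).to("cuda")

image = pipe("a photo of an astronaut riding a horse").images[0]
image.save("astronaut.png")
```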
Ayjayo/DialoGPT-medium-AyjayoAI
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
12
2022-12-12T09:38:01Z
--- model-index: - name: YLIK/stt_en_conformer_ctc_small results: - task: type: automatic-speech-recognition dataset: name: Librispeech (clean) type: librispeech_asr config: other split: test args: language: en metrics: - type: wer value: 8.1 name: WER --- # Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> # Table of Contents 1. [Model Details](#model-details) 2. [Uses](#uses) 3. [Bias, Risks, and Limitations](#bias-risks-and-limitations) 4. [Training Details](#training-details) 5. [Evaluation](#evaluation) 6. [Model Examination](#model-examination-optional) 7. [Environmental Impact](#environmental-impact) 8. [Technical Specifications](#technical-specifications-optional) 9. [Citation](#citation-optional) 10. [Glossary](#glossary-optional) 11. [More Information](#more-information-optional) 12. [Model Card Authors](#model-card-authors-optional) 13. [Model Card Contact](#model-card-contact) 14. [How To Get Started With the Model](#how-to-get-started-with-the-model) # Model Details ## Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] - **Resources for more information:** [More Information Needed] # Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ## Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ## Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ## Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] # Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ## Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. # Training Details ## Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ## Training Procedure [optional] <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> ### Preprocessing [More Information Needed] ### Speeds, Sizes, Times <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] # Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ## Testing Data, Factors & Metrics ### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] ### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. 
--> [More Information Needed] ### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ## Results [More Information Needed] # Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] # Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] # Technical Specifications [optional] ## Model Architecture and Objective [More Information Needed] ## Compute Infrastructure [More Information Needed] ### Hardware [More Information Needed] ### Software [More Information Needed] # Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] # Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] # More Information [optional] [More Information Needed] # Model Card Authors [optional] [More Information Needed] # Model Card Contact [More Information Needed] # How to Get Started with the Model Use the code below to get started with the model. <details> <summary> Click to expand </summary> [More Information Needed] </details>
Aymene/opus-mt-en-ro-finetuned-en-to-ro
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
2022-12-12T09:42:56Z
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 252.79 +/- 19.40 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
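The usage section above is still a TODO stub; a minimal sketch following the `huggingface_sb3` loading pattern used elsewhere in this dump, with a placeholder repo id and filename since the card does not give them:

```python
from stable_baselines3 import PPO
from huggingface_sb3 import load_from_hub

# Placeholder repo id / filename; replace with the actual ones for this model.
checkpoint = load_from_hub(repo_id="<user>/ppo-LunarLander-v2", filename="ppo-LunarLander-v2.zip")
model = PPO.load(checkpoint, print_system_info=True)
print(model.policy)
```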
Ayran/DialoGPT-medium-harry-potter-1-through-3
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
12
2022-12-12T09:56:02Z
--- language: - ca license: apache-2.0 tags: - "catalan" - "masked-lm" - "longformer" - "longformer-base-4096-ca-v2" - "CaText" - "Catalan Textual Corpus" widget: - text: "El Català és una llengua molt <mask>." - text: "Salvador Dalí va viure a <mask>." - text: "La Costa Brava té les millors <mask> d'Espanya." - text: "El cacaolat és un batut de <mask>." - text: "<mask> és la capital de la Garrotxa." - text: "Vaig al <mask> a buscar bolets." - text: "Antoni Gaudí vas ser un <mask> molt important per la ciutat." - text: "Catalunya és una referència en <mask> a nivell europeu." --- # Catalan Longformer (longformer-base-4096-ca-v2) base model ## Table of Contents <details> <summary>Click to expand</summary> - [Model description](#model-description) - [Intended uses and limitations](#intended-uses) - [How to use](#how-to-use) - [Limitations and bias](#limitations-and-bias) - [Training](#training) - [Training data](#training-data) - [Training procedure](#training-procedure) - [Evaluation](#evaluation) - [CLUB benchmark](#club-benchmark) - [Evaluation results](#evaluation-results) - [Licensing Information](#licensing-information) - [Additional information](#additional-information) - [Author](#author) - [Contact information](#contact-information) - [Copyright](#copyright) - [Licensing information](#licensing-information) - [Funding](#funding) - [Citing information](#citing-information) - [Disclaimer](#disclaimer) </details> ## Model description The **longformer-base-4096-ca-v2** is the [Longformer](https://huggingface.co/allenai/longformer-base-4096) version of the [roberta-base-ca-v2](https://huggingface.co/projecte-aina/roberta-base-ca-v2) masked language model for the Catalan language. The use of these models allows us to process larger contexts (up to 4096 tokens) as input without the need of additional aggregation strategies. The pretraining process of this model started from the **roberta-base-ca-v2** checkpoint and was pretrained for MLM on both short and long documents in Catalan. The Longformer model uses a combination of sliding window (local) attention and global attention. Global attention is user-configured based on the task to allow the model to learn task-specific representations. Please refer to the original [paper](https://arxiv.org/abs/2004.05150) for more details on how to set global attention. ## Intended uses and limitations The **longformer-base-4096-ca-v2** model is ready-to-use only for masked language modeling to perform the Fill Mask task (try the inference API or read the next section). However, it is intended to be fine-tuned on non-generative downstream tasks such as Question Answering, Text Classification, or Named Entity Recognition. ## How to use Here is how to use this model: ```python from transformers import AutoModelForMaskedLM from transformers import AutoTokenizer, FillMaskPipeline from pprint import pprint tokenizer_hf = AutoTokenizer.from_pretrained('projecte-aina/longformer-base-4096-ca-v2') model = AutoModelForMaskedLM.from_pretrained('projecte-aina/longformer-base-4096-ca-v2') model.eval() pipeline = FillMaskPipeline(model, tokenizer_hf) text = f"Em dic <mask>." res_hf = pipeline(text) pprint([r['token_str'] for r in res_hf]) ``` ## Limitations and bias At the time of submission, no measures have been taken to estimate the bias embedded in the model. However, we are well aware that our models may be biased since the corpus have been collected using crawling techniques on multiple web sources. 
We intend to conduct research in these areas in the future, and if completed, this model card will be updated. ## Training ### Training data The training corpus consists of several corpora gathered from web crawling and public corpora. | Corpus | Size in GB | |-------------------------|------------| | Catalan Crawling | 13.00 | | Wikipedia | 1.10 | | DOGC | 0.78 | | Catalan Open Subtitles | 0.02 | | Catalan Oscar | 4.00 | | CaWaC | 3.60 | | Cat. General Crawling | 2.50 | | Cat. Government Crawling | 0.24 | | ACN | 0.42 | | Padicat | 0.63 | | RacoCatalá | 8.10 | | Nació Digital | 0.42 | | Vilaweb | 0.06 | | Tweets | 0.02 | For this specific pre-training process, we have performed an undersampling process to obtain a corpus of 5.3 GB. ### Training procedure The training corpus has been tokenized using a byte version of Byte-Pair Encoding (BPE) used in the original [RoBERTa](https://arxiv.org/abs/1907.11692) model with a vocabulary size of 50,262 tokens. The pre-training consists of masked language model training that follows the approach employed for the RoBERTa base model. The training lasted a total of 37 hours with 8 computing nodes, each with 2 AMD MI50 GPUs of 32 GB VRAM. ## Evaluation ### CLUB benchmark The **longformer-base-4096-ca-v2** model has been fine-tuned on the downstream tasks of the [Catalan Language Understanding Evaluation benchmark](https://club.aina.bsc.es/) (CLUB), which has been created along with the model. It contains the following tasks and their related datasets: 1. Named Entity Recognition (NER) **[NER (AnCora)](https://zenodo.org/record/4762031#.YKaFjqGxWUk)**: extracted named entities from the original [Ancora](https://doi.org/10.5281/zenodo.4762030) version, filtering out some unconventional ones, like book titles, and transcribed them into a standard CONLL-IOB format 2. Part-of-Speech Tagging (POS) **[POS (AnCora)](https://zenodo.org/record/4762031#.YKaFjqGxWUk)**: from the [Universal Dependencies treebank](https://github.com/UniversalDependencies/UD_Catalan-AnCora) of the well-known Ancora corpus. 3. Text Classification (TC) **[TeCla](https://huggingface.co/datasets/projecte-aina/tecla)**: consisting of 137k news pieces from the Catalan News Agency ([ACN](https://www.acn.cat/)) corpus, with 30 labels. 4. Textual Entailment (TE) **[TE-ca](https://huggingface.co/datasets/projecte-aina/teca)**: consisting of 21,163 pairs of premises and hypotheses, annotated according to the inference relation they have (implication, contradiction, or neutral), extracted from the [Catalan Textual Corpus](https://huggingface.co/datasets/projecte-aina/catalan_textual_corpus). 5. Semantic Textual Similarity (STS) **[STS-ca](https://huggingface.co/datasets/projecte-aina/sts-ca)**: consisting of more than 3000 sentence pairs, annotated with the semantic similarity between them, scraped from the [Catalan Textual Corpus](https://huggingface.co/datasets/projecte-aina/catalan_textual_corpus). 6. Question Answering (QA): **[VilaQuAD](https://huggingface.co/datasets/projecte-aina/vilaquad)**: contains 6,282 pairs of questions and answers, outsourced from 2095 Catalan language articles from VilaWeb newswire text. **[ViquiQuAD](https://huggingface.co/datasets/projecte-aina/viquiquad)**: consisting of more than 15,000 questions outsourced from Catalan Wikipedia, randomly chosen from a set of 596 articles that were originally written in Catalan. 
**[CatalanQA](https://huggingface.co/datasets/projecte-aina/catalanqa)**: an aggregation of 2 previous datasets (VilaQuAD and ViquiQuAD), 21,427 pairs of Q/A balanced by type of question, containing one question and one answer per context, although the contexts can repeat multiple times. ### Evaluation results After fine-tuning the model on the downstream tasks, it achieved the following performance: ### Evaluation results | Task | NER (F1) | POS (F1) | STS-ca (Comb) | TeCla (Acc.) | TEca (Acc.) | VilaQuAD (F1/EM)| ViquiQuAD (F1/EM) | CatalanQA (F1/EM) | XQuAD-ca <sup>1</sup> (F1/EM) | | ------------|:-------------:| -----:|:------|:------|:-------|:------|:----|:----|:----| | RoBERTa-large-ca-v2 | **89.82** | **99.02** | **83.41** | **75.46** | 83.61 | **89.34/75.50** | **89.20**/75.77 | **90.72/79.06** | **73.79**/55.34 | | RoBERTa-base-ca-v2 | 89.29 | 98.96 | 79.07 | 74.26 | 83.14 | 87.74/72.58 | 88.72/**75.91** | 89.50/76.63 | 73.64/**55.42** | | Longformer-base-4096-ca-v2 | 88.49 | 98.98 | 78.37 | 73.79 | **83.89** | 87.59/72.33 | 88.70/**76.05** | 89.33/77.03 | 73.09/54.83 | | BERTa | 89.76 | 98.96 | 80.19 | 73.65 | 79.26 | 85.93/70.58 | 87.12/73.11 | 89.17/77.14 | 69.20/51.47 | | mBERT | 86.87 | 98.83 | 74.26 | 69.90 | 74.63 | 82.78/67.33 | 86.89/73.53 | 86.90/74.19 | 68.79/50.80 | | XLM-RoBERTa | 86.31 | 98.89 | 61.61 | 70.14 | 33.30 | 86.29/71.83 | 86.88/73.11 | 88.17/75.93 | 72.55/54.16 | <sup>1</sup> : Trained on CatalanQA, tested on XQuAD-ca. ## Additional information ### Author Text Mining Unit (TeMU) at the Barcelona Supercomputing Center ([email protected]) ### Contact information For further information, send an email to [email protected] ### Copyright Copyright (c) 2022 Text Mining Unit at Barcelona Supercomputing Center ### Licensing information [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ### Funding This work was funded by the [Departament de la Vicepresidència i de Polítiques Digitals i Territori de la Generalitat de Catalunya](https://politiquesdigitals.gencat.cat/ca/inici/index.html#googtrans(ca|en) within the framework of [Projecte AINA](https://politiquesdigitals.gencat.cat/ca/economia/catalonia-ai/aina). ### Disclaimer <details> <summary>Click to expand</summary> The models published in this repository are intended for a generalist purpose and are available to third parties. These models may have bias and/or any other undesirable distortions. When third parties, deploy or provide systems and/or services to other parties using any of these models (or using systems based on these models) or become users of the models, they should note that it is their responsibility to mitigate the risks arising from their use and, in any event, to comply with applicable regulations, including regulations regarding the use of Artificial Intelligence. In no event shall the owner and creator of the models (BSC – Barcelona Supercomputing Center) be liable for any results arising from the use made by third parties of these models. </details>
Ayran/DialoGPT-medium-harry-potter-1-through-4-plus-6
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
12
null
Hellhound model mixed using AnythingV3 and centaur-only tagged images from Danbooru. Dataset of 200 images with complete tag lists. Works OK, though it's not much more than you can get with pure AnythingV3; it was lacking detail in the fur, so I just felt like doing it. The best prompts would be variations of the example prompt. More monster girl models are on the way; feel free to request your favorites :) Hellhound: 1girl, animal_ears, animal_hands, black_fur, black_hair, black_sclera, black_skin, breasts, claws, colored_sclera, colored_skin, dog_ears, flaming_eyes, large_breasts, long_hair, looking_at_viewer, monster_girl, red_eyes, tail, dynamic pose ![Hellhound](https://huggingface.co/scriche/Hellhound/resolve/main/00146.png)
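A hedged sketch of trying the example prompt, assuming the merge lives at `scriche/Hellhound` (the repo the sample image comes from) and ships a single checkpoint file that `diffusers` can load via `from_single_file`; the checkpoint filename is hypothetical, and if the repo only targets web UIs such as AUTOMATIC1111, load the checkpoint there instead:

```python
from diffusers import StableDiffusionPipeline
import torch

# Hypothetical checkpoint filename; check the repo's file listing for the real one.
ckpt_url = "https://huggingface.co/scriche/Hellhound/blob/main/hellhound.safetensors"
pipe = StableDiffusionPipeline.from_single_file(ckpt_url, torch_dtype=torch.float16).to("cuda")

# Example prompt from the card.
prompt = ("1girl, animal_ears, animal_hands, black_fur, black_hair, black_sclera, black_skin, "
          "breasts, claws, colored_sclera, colored_skin, dog_ears, flaming_eyes, large_breasts, "
          "long_hair, looking_at_viewer, monster_girl, red_eyes, tail, dynamic pose")
image = pipe(prompt).images[0]
image.save("hellhound.png")
```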
Ayu/Shiriro
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - wer model-index: - name: whisper-calls-small results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # whisper-calls-small This model is a fine-tuned version of [openai/whisper-small](https://huggingface.co/openai/whisper-small) on the None dataset. Just a test, probably not a very good model ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 1 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 16 - total_train_batch_size: 16 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 4000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:------:| | 0.076 | 4.03 | 1000 | 0.0481 | 4.1883 | | 0.0068 | 8.06 | 2000 | 0.0049 | 0.6362 | | 0.0011 | 12.1 | 3000 | 0.0012 | 0.0157 | | 0.0005 | 16.13 | 4000 | 0.0006 | 0.0 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1 - Tokenizers 0.13.2
Azaghast/DistilBART-SCP-ParaSummarization
[ "pytorch", "bart", "text2text-generation", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "BartForConditionalGeneration" ], "model_type": "bart", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": true, "length_penalty": 2, "max_length": 142, "min_length": 56, "no_repeat_ngram_size": 3, "num_beams": 4, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 265.04 +/- 38.0587 name: mean_reward verified: false --- ## To Import and Use the model ```python from stable_baselines3 import PPO from huggingface_sb3 import load_from_hub repo_id = "Eslam25/LunarLander-v2-PPO" filename = "ppo_1st.zip" custom_objects = { "learning_rate": 0.0, "lr_schedule": lambda _: 0.0, "clip_range": lambda _: 0.0, } checkpoint = load_from_hub(repo_id, filename) model = PPO.load(checkpoint, custom_objects=custom_objects, print_system_info=True) ```
BAHIJA/distilbert-base-uncased-finetuned-cola
[ "pytorch", "tensorboard", "distilbert", "text-classification", "dataset:glue", "transformers", "generated_from_trainer", "license:apache-2.0", "model-index" ]
text-classification
{ "architectures": [ "DistilBertForSequenceClassification" ], "model_type": "distilbert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
36
null
--- library_name: PyLaia license: mit tags: - PyLaia - PyTorch - Handwritten text recognition metrics: - CER - WER language: - 'no' --- # Hugin-Munin handwritten text recognition This model performs Handwritten Text Recognition in Norwegian. It was developed during the [HUGIN-MUNIN project](https://hugin-munin-project.github.io/). ## Model description The model has been trained using the PyLaia library on the [NorHand](https://zenodo.org/record/6542056) document images. Training images were resized with a fixed height of 128 pixels, keeping the original aspect ratio. ## Evaluation results The model achieves the following results: | set | CER (%) | WER (%) | | ----- | ---------- | --------- | | train | 2.17 | 7.65 | | val | 8.78 | 24.93 | | test | 7.94 | 24.04 | Results improve on validation and test sets when PyLaia is combined with a 6-gram language model. The language model is trained on [this text corpus](https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-73/) published by the National Library of Norway. | set | CER (%) | WER (%) | | ----- | ---------- | --------- | | train | 2.40 | 8.10 | | val | 7.45 | 19.75 | | test | 6.55 | 18.2 | ## How to use Please refer to the PyLaia library page (https://pypi.org/project/pylaia/) to use this model. # Cite us! ```bibtex @inproceedings{10.1007/978-3-031-06555-2_27, author = {Maarand, Martin and Beyer, Yngvil and K\r{a}sen, Andre and Fosseide, Knut T. and Kermorvant, Christopher}, title = {A Comprehensive Comparison of Open-Source Libraries for Handwritten Text Recognition in Norwegian}, year = {2022}, isbn = {978-3-031-06554-5}, publisher = {Springer-Verlag}, address = {Berlin, Heidelberg}, url = {https://doi.org/10.1007/978-3-031-06555-2_27}, doi = {10.1007/978-3-031-06555-2_27}, booktitle = {Document Analysis Systems: 15th IAPR International Workshop, DAS 2022, La Rochelle, France, May 22–25, 2022, Proceedings}, pages = {399–413}, numpages = {15}, keywords = {Norwegian language, Open-source, Handwriting recognition}, location = {La Rochelle, France} } ```
BME-TMIT/foszt2oszt
[ "pytorch", "encoder-decoder", "text2text-generation", "hu", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "EncoderDecoderModel" ], "model_type": "encoder-decoder", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
15
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - accuracy model-index: - name: distilbert-base-uncased_cls_SentEval-CR results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # distilbert-base-uncased_cls_SentEval-CR This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.4301 - Accuracy: 0.9110 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 189 | 0.3037 | 0.8898 | | No log | 2.0 | 378 | 0.3295 | 0.8951 | | 0.2938 | 3.0 | 567 | 0.3413 | 0.9057 | | 0.2938 | 4.0 | 756 | 0.4158 | 0.9070 | | 0.2938 | 5.0 | 945 | 0.4301 | 0.9110 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
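The card leaves usage blank; a minimal sketch with the `transformers` text-classification pipeline, using a placeholder repo id since the card does not name the namespace of the fine-tuned checkpoint:

```python
from transformers import pipeline

# Placeholder repo id; substitute the actual fine-tuned checkpoint.
classifier = pipeline("text-classification", model="<user>/distilbert-base-uncased_cls_SentEval-CR")

print(classifier("The battery life on this camera is fantastic."))
# e.g. [{'label': 'LABEL_1', 'score': ...}] unless custom label names were saved with the model
```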
BOON/electra-xlnet
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
2022-12-12T11:08:05Z
--- tags: - FrozenLake-v1-4x4-no_slippery - q-learning - reinforcement-learning - custom-implementation model-index: - name: q-FrozenLake-v1-4x4-noSlippery-wohoo results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: FrozenLake-v1-4x4-no_slippery type: FrozenLake-v1-4x4-no_slippery metrics: - type: mean_reward value: 1.00 +/- 0.00 name: mean_reward verified: false --- # **Q-Learning** Agent playing **FrozenLake-v1** This is a trained model of a **Q-Learning** agent playing **FrozenLake-v1**. ## Usage ```python model = load_from_hub(repo_id="osanseviero/q-FrozenLake-v1-4x4-noSlippery-wohoo", filename="q-learning.pkl") # Don't forget to check if you need to add additional attributes (is_slippery=False etc) env = gym.make(model["env_id"]) ```
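The snippet above assumes a `load_from_hub` helper and a `gym` import that the card never defines; a hedged sketch of one way such a helper is commonly written for pickled Q-Learning models, using `huggingface_hub`:

```python
import pickle

import gym
from huggingface_hub import hf_hub_download


def load_from_hub(repo_id: str, filename: str) -> dict:
    """Download a pickled Q-Learning model dict (q-table, env_id, ...) from the Hub."""
    local_path = hf_hub_download(repo_id=repo_id, filename=filename)
    with open(local_path, "rb") as f:
        return pickle.load(f)


model = load_from_hub(
    repo_id="osanseviero/q-FrozenLake-v1-4x4-noSlippery-wohoo", filename="q-learning.pkl"
)
env = gym.make(model["env_id"])  # add is_slippery=False etc. if the saved dict expects it
```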
BSC-LT/RoBERTalex
[ "pytorch", "roberta", "fill-mask", "es", "dataset:legal_ES", "dataset:temu_legal", "arxiv:2110.12201", "transformers", "legal", "spanish", "license:apache-2.0", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
24
2022-12-12T11:10:15Z
--- tags: - Taxi-v3 - q-learning - reinforcement-learning - custom-implementation model-index: - name: super_taxi results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: Taxi-v3 type: Taxi-v3 metrics: - type: mean_reward value: 7.46 +/- 2.78 name: mean_reward verified: false --- # **Q-Learning** Agent playing **Taxi-v3** This is a trained model of a **Q-Learning** agent playing **Taxi-v3**. ## Usage ```python model = load_from_hub(repo_id="osanseviero/super_taxi", filename="q-learning.pkl") # Don't forget to check if you need to add additional attributes (is_slippery=False etc) env = gym.make(model["env_id"]) ```
Babelscape/rebel-large
[ "pytorch", "safetensors", "bart", "text2text-generation", "en", "dataset:Babelscape/rebel-dataset", "transformers", "seq2seq", "relation-extraction", "license:cc-by-nc-sa-4.0", "model-index", "autotrain_compatible", "has_space" ]
text2text-generation
{ "architectures": [ "BartForConditionalGeneration" ], "model_type": "bart", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
9,458
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - wer model-index: - name: w2v2-libri results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # w2v2-libri This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the None dataset. It achieves the following results on the evaluation set: - Loss: 1.7315 - Wer: 0.5574 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.98) and epsilon=1e-07 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 3000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:------:| | 7.1828 | 50.0 | 200 | 3.0563 | 1.0 | | 2.8849 | 100.0 | 400 | 2.9023 | 1.0 | | 1.5108 | 150.0 | 600 | 1.1468 | 0.6667 | | 0.1372 | 200.0 | 800 | 1.3749 | 0.6279 | | 0.0816 | 250.0 | 1000 | 1.3985 | 0.6224 | | 0.0746 | 300.0 | 1200 | 1.5285 | 0.6141 | | 0.0556 | 350.0 | 1400 | 1.5496 | 0.5920 | | 0.0644 | 400.0 | 1600 | 1.6263 | 0.5947 | | 0.0546 | 450.0 | 1800 | 1.6803 | 0.5906 | | 0.0491 | 500.0 | 2000 | 1.6155 | 0.5837 | | 0.0518 | 550.0 | 2200 | 1.6784 | 0.5698 | | 0.0314 | 600.0 | 2400 | 1.6050 | 0.5602 | | 0.0048 | 650.0 | 2600 | 1.7703 | 0.5546 | | 0.0042 | 700.0 | 2800 | 1.7135 | 0.5615 | | 0.0025 | 750.0 | 3000 | 1.7315 | 0.5574 | ### Framework versions - Transformers 4.25.1 - Pytorch 1.13.0+cu116 - Datasets 1.18.3 - Tokenizers 0.13.2
Babelscape/wikineural-multilingual-ner
[ "pytorch", "tensorboard", "safetensors", "bert", "token-classification", "de", "en", "es", "fr", "it", "nl", "pl", "pt", "ru", "multilingual", "dataset:Babelscape/wikineural", "transformers", "named-entity-recognition", "sequence-tagger-model", "license:cc-by-nc-sa-4.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
41,608
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 259.85 +/- 19.20 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
Babysittingyoda/DialoGPT-small-familyguy
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
13
2022-12-12T11:45:00Z
--- language: - es tags: - pytorch - causal-lm license: apache-2.0 datasets: - bertin-project/mc4-es-sampled - infolibros --- # BERTIN GPT-J-6B-infolibros This model is a fine-tuned version of [BERTIN-GPT-J-6B](https://huggingface.co/bertin-project/bertin-gpt-j-6B) on the [InfoLibros Corpus](https://zenodo.org/record/7254400).
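The card stops at the fine-tuning summary; a minimal sketch of text generation with `transformers`, using a placeholder repo id for the fine-tuned checkpoint (only the base model, `bertin-project/bertin-gpt-j-6B`, is linked in the card):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id for the fine-tuned model; the base is bertin-project/bertin-gpt-j-6B.
repo_id = "<org>/bertin-gpt-j-6B-infolibros"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.float16, device_map="auto")

inputs = tokenizer("Había una vez", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=50, do_sample=True, top_p=0.95)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```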
Banshee/dialoGPT-small-luke
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: afl-3.0 --- # Overview `ul2-large-japanese` is a Japanese version of UL2. # TODOs - [ ] Documentation - [x] Pre-training - [ ] evaluate on downstream tasks - If you get some experimental results of this model on downstream tasks, please feel free to make Pull Requests. # Results - Under construction ## Question Answering ## Others # Acknowledgement Research supported with Cloud TPUs from Google's TPU Research Cloud (TRC)
Barbarameerr/Barbara
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
2022-12-12T12:24:43Z
--- license: mit tags: - generated_from_trainer metrics: - accuracy model-index: - name: roberta-large_cls_CR results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-large_cls_CR This model is a fine-tuned version of [roberta-large](https://huggingface.co/roberta-large) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.3325 - Accuracy: 0.9043 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 213 | 0.4001 | 0.875 | | No log | 2.0 | 426 | 0.4547 | 0.8324 | | 0.499 | 3.0 | 639 | 0.3161 | 0.8963 | | 0.499 | 4.0 | 852 | 0.3219 | 0.9069 | | 0.2904 | 5.0 | 1065 | 0.3325 | 0.9043 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
Barleysack/AERoberta
[ "pytorch", "roberta", "question-answering", "transformers", "autotrain_compatible" ]
question-answering
{ "architectures": [ "RobertaForQuestionAnswering" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
7
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 273.72 +/- 19.85 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) A minimal loading sketch; the repo id and filename below are placeholders for this checkpoint's actual location on the Hub.

```python
from stable_baselines3 import PPO
from huggingface_sb3 import load_from_hub

# Download the checkpoint from the Hub (placeholder repo id / filename) and load it
checkpoint = load_from_hub(repo_id="<user>/ppo-LunarLander-v2", filename="ppo-LunarLander-v2.zip")
model = PPO.load(checkpoint)
```
BatuhanYilmaz/mlm-finetuned-imdb
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: creativeml-openrail-m tags: - text-to-image - stable-diffusion --- ### rembrantSito Dreambooth model trained by theguaz with [TheLastBen's fast-DreamBooth](https://colab.research.google.com/github/TheLastBen/fast-stable-diffusion/blob/main/fast-DreamBooth.ipynb) notebook Test the concept via A1111 Colab [fast-Colab-A1111](https://colab.research.google.com/github/TheLastBen/fast-stable-diffusion/blob/main/fast_stable_diffusion_AUTOMATIC1111.ipynb) Or you can run your new concept via `diffusers` [Colab Notebook for Inference](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_dreambooth_inference.ipynb) Sample pictures of this concept: ![0](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_3.jpeg) ![1](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_13.jpg) ![2](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_1.jpeg) ![3](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_11.jpeg) ![4](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_10.jpg) ![5](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_12.jpeg) ![6](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_5.jpeg) ![7](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_0.jpeg) ![8](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_9.jpg) ![9](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_2.jpeg) ![10](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_6.jpeg) ![11](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_7.jpeg) ![12](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_8.jpg) ![13](https://huggingface.co/theguaz/rembrantsito/resolve/main/sample_images/rembrandt_4.jpeg)
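A minimal `diffusers` inference sketch. The repo id is taken from the sample-image links above and assumes the repository stores weights in diffusers format (as the linked inference notebook expects); the `rembrantsito` token in the prompt is an assumption based on the concept name and may need to match the token actually used during training.

```python
import torch
from diffusers import StableDiffusionPipeline

# Repo id from the sample-image links; the "rembrantsito" prompt token is assumed
pipe = StableDiffusionPipeline.from_pretrained("theguaz/rembrantsito", torch_dtype=torch.float16).to("cuda")

image = pipe("a portrait of a man in the style of rembrantsito, oil painting").images[0]
image.save("rembrantsito_sample.png")
```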
Baybars/debateGPT
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - accuracy model-index: - name: bert-large-uncased_cls_sst2 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # bert-large-uncased_cls_sst2 This model is a fine-tuned version of [bert-large-uncased](https://huggingface.co/bert-large-uncased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.3787 - Accuracy: 0.9255 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 433 | 0.4188 | 0.8578 | | 0.3762 | 2.0 | 866 | 0.4894 | 0.8968 | | 0.3253 | 3.0 | 1299 | 0.3313 | 0.9094 | | 0.1601 | 4.0 | 1732 | 0.3399 | 0.9232 | | 0.0744 | 5.0 | 2165 | 0.3787 | 0.9255 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
Baybars/wav2vec2-xls-r-300m-cv8-turkish
[ "pytorch", "wav2vec2", "automatic-speech-recognition", "tr", "dataset:common_voice", "transformers", "common_voice", "generated_from_trainer", "hf-asr-leaderboard", "robust-speech-event", "license:apache-2.0" ]
automatic-speech-recognition
{ "architectures": [ "Wav2Vec2ForCTC" ], "model_type": "wav2vec2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
5
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) The Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial to learn to train your first agent using ML-Agents and publish it to the Hub: ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser:**. 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Step 1: Write your model_id: lukee/ppo-Huggy 3. Step 2: Select your *.nn /*.onnx file 4. Click on Watch the agent play 👀
BeIR/query-gen-msmarco-t5-base-v1
[ "pytorch", "jax", "t5", "text2text-generation", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "T5ForConditionalGeneration" ], "model_type": "t5", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": true, "length_penalty": 2, "max_length": 200, "min_length": 30, "no_repeat_ngram_size": 3, "num_beams": 4, "prefix": "summarize: " }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to German: " }, "translation_en_to_fr": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to French: " }, "translation_en_to_ro": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to Romanian: " } } }
1,816
2023-02-26T12:45:08Z
--- license: apache-2.0 tags: - generated_from_trainer metrics: - rouge model-index: - name: t5-small-finetuned-xsum results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # t5-small-finetuned-xsum This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.6799 - Rouge1: 16.5588 - Rouge2: 10.1416 - Rougel: 15.5658 - Rougelsum: 15.5525 - Gen Len: 19.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.002 - train_batch_size: 10 - eval_batch_size: 10 - seed: 42 - gradient_accumulation_steps: 5 - total_train_batch_size: 50 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 8 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len | |:-------------:|:-----:|:----:|:---------------:|:-------:|:-------:|:-------:|:---------:|:-------:| | No log | 1.0 | 180 | 2.0606 | 12.0435 | 4.427 | 10.6651 | 10.6054 | 18.888 | | No log | 2.0 | 360 | 1.4788 | 17.2418 | 9.6974 | 16.1178 | 16.1121 | 19.0 | | 2.5647 | 3.0 | 540 | 1.2028 | 16.3251 | 9.0308 | 15.2903 | 15.2937 | 19.0 | | 2.5647 | 4.0 | 720 | 1.0332 | 16.3718 | 9.5348 | 15.3831 | 15.3778 | 19.0 | | 2.5647 | 5.0 | 900 | 0.9030 | 16.5137 | 9.7914 | 15.5124 | 15.5142 | 19.0 | | 1.1692 | 6.0 | 1080 | 0.8346 | 16.82 | 10.2316 | 15.7513 | 15.7496 | 19.0 | | 1.1692 | 7.0 | 1260 | 0.7406 | 16.6103 | 9.8786 | 15.5361 | 15.5297 | 19.0 | | 1.1692 | 8.0 | 1440 | 0.6799 | 16.5588 | 10.1416 | 15.5658 | 15.5525 | 19.0 | ### Framework versions - Transformers 4.27.0.dev0 - Pytorch 1.13.1+cu116 - Datasets 2.9.0 - Tokenizers 0.13.2
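A minimal inference sketch with the 🤗 `pipeline` API; the repo id is a placeholder for wherever this fine-tuned checkpoint is hosted, and the input text is illustrative.

```python
from transformers import pipeline

# Placeholder repo id -- substitute the Hub id of this fine-tuned checkpoint
summarizer = pipeline("summarization", model="your-username/t5-small-finetuned-xsum")

text = (
    "The tower is 324 metres tall, about the same height as an 81-storey building, "
    "and the tallest structure in Paris."
)
print(summarizer(text, max_length=19, min_length=5)[0]["summary_text"])
```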
BeIR/query-gen-msmarco-t5-large-v1
[ "pytorch", "jax", "t5", "text2text-generation", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "T5ForConditionalGeneration" ], "model_type": "t5", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": true, "length_penalty": 2, "max_length": 200, "min_length": 30, "no_repeat_ngram_size": 3, "num_beams": 4, "prefix": "summarize: " }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to German: " }, "translation_en_to_fr": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to French: " }, "translation_en_to_ro": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to Romanian: " } } }
1,225
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - wer model-index: - name: openai/whisper-medium results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # openai/whisper-medium This model is a fine-tuned version of [openai/whisper-medium](https://huggingface.co/openai/whisper-medium) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.1711 - Wer: 10.1446 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-06 - train_batch_size: 32 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 200 - training_steps: 5000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:-------:| | 0.111 | 0.2 | 1000 | 0.1786 | 11.0063 | | 0.0961 | 1.16 | 2000 | 0.1719 | 10.5906 | | 0.0732 | 2.12 | 3000 | 0.1743 | 10.3268 | | 0.0742 | 3.08 | 4000 | 0.1715 | 10.2262 | | 0.0692 | 4.03 | 5000 | 0.1711 | 10.1446 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
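A minimal transcription sketch with the 🤗 `pipeline` API; the repo id and audio path are placeholders, since the card does not state where this fine-tuned checkpoint is hosted or which language/dataset it targets.

```python
from transformers import pipeline

# Placeholder repo id and audio path -- substitute your own
asr = pipeline("automatic-speech-recognition", model="your-username/whisper-medium-finetuned")

result = asr("sample.wav")
print(result["text"])
```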
Belin/T5-Terms-and-Conditions
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) The Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial to learn to train your first agent using ML-Agents and publish it to the Hub: ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser:**. 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Step 1: Write your model_id: aestes/ppo-Huggy 3. Step 2: Select your *.nn /*.onnx file 4. Click on Watch the agent play 👀
Bella4322/Sarah
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- tags: - generated_from_trainer metrics: - precision - recall - f1 - accuracy model-index: - name: bertinho-ner results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # bertinho-ner This model is a fine-tuned version of [marcosgg/bert-base-gl-cased](https://huggingface.co/marcosgg/bert-base-gl-cased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0482 - Precision: 0.8715 - Recall: 0.8975 - F1: 0.8843 - Accuracy: 0.9907 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3.0 ### Training results ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu116 - Datasets 2.7.1 - Tokenizers 0.13.2
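A minimal NER inference sketch with the 🤗 `pipeline` API; the repo id is a placeholder (only the Galician base model `marcosgg/bert-base-gl-cased` is named above) and the example sentence is illustrative.

```python
from transformers import pipeline

# Placeholder repo id -- substitute the Hub id of this fine-tuned NER checkpoint
ner = pipeline(
    "token-classification",
    model="your-username/bertinho-ner",
    aggregation_strategy="simple",
)

print(ner("A Coruña é unha cidade de Galicia."))
```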
BenDavis71/GPT-2-Finetuning-AIRaid
[ "pytorch", "jax", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- tags: - generated_from_trainer model-index: - name: pegasus-pubmed_radiology-ai-cardiothoracic-0.9 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # pegasus-pubmed_radiology-ai-cardiothoracic-0.9 This model is a fine-tuned version of [google/pegasus-pubmed](https://huggingface.co/google/pegasus-pubmed) on the None dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 3 - eval_batch_size: 3 - seed: 42 - gradient_accumulation_steps: 16 - total_train_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - num_epochs: 1 ### Training results ### Framework versions - Transformers 4.20.1 - Pytorch 1.12.1+cu116 - Datasets 2.4.0 - Tokenizers 0.12.1
BenWitter/DialoGPT-small-Tyrion
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
11
null
--- license: mit tags: - pytorch - diffusers - unconditional-image-generation - diffusion-models-class --- # Model Card for Unit 1 of the [Diffusion Models Class 🧨](https://github.com/huggingface/diffusion-models-class) This model is a diffusion model for unconditional image generation of cute 🦋. ## Usage ```python from diffusers import DDPMPipeline pipeline = DDPMPipeline.from_pretrained('nghenzi/sd-class-butterflies-32') image = pipeline().images[0] image ```
Benicio/t5-small-finetuned-en-to-ru
[ "pytorch", "tensorboard", "t5", "text2text-generation", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "T5ForConditionalGeneration" ], "model_type": "t5", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": true, "length_penalty": 2, "max_length": 200, "min_length": 30, "no_repeat_ngram_size": 3, "num_beams": 4, "prefix": "summarize: " }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to German: " }, "translation_en_to_fr": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to French: " }, "translation_en_to_ro": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to Romanian: " } } }
50
null
--- license: mit tags: - generated_from_trainer metrics: - accuracy model-index: - name: roberta-large_cls_sst2 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-large_cls_sst2 This model is a fine-tuned version of [roberta-large](https://huggingface.co/roberta-large) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.3158 - Accuracy: 0.9404 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 433 | 0.3387 | 0.9255 | | 0.4281 | 2.0 | 866 | 0.2740 | 0.9243 | | 0.3047 | 3.0 | 1299 | 0.4185 | 0.9151 | | 0.2014 | 4.0 | 1732 | 0.2850 | 0.9438 | | 0.1026 | 5.0 | 2165 | 0.3158 | 0.9404 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
BertChristiaens/EmojiPredictor
[ "pytorch", "distilbert", "token-classification", "transformers", "autotrain_compatible" ]
token-classification
{ "architectures": [ "DistilBertForTokenClassification" ], "model_type": "distilbert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
6
null
--- tags: - autotrain - tabular - classification - tabular-classification datasets: - fyhao/autotrain-data-sentiment-analysis co2_eq_emissions: emissions: 0.0803280731181239 widget: structuredData: text: - I am happy --- # Model Trained Using AutoTrain - Problem type: Multi-class Classification - Model ID: 2435575634 - CO2 Emissions (in grams): 0.0803 ## Validation Metrics - Loss: 0.186 - Accuracy: 0.873 - Macro F1: 0.870 - Micro F1: 0.873 - Weighted F1: 0.868 - Macro Precision: 0.938 - Micro Precision: 0.873 - Weighted Precision: 0.896 - Macro Recall: 0.833 - Micro Recall: 0.873 - Weighted Recall: 0.873 ## Usage

```python
import json

import joblib
import pandas as pd

# Load the trained AutoTrain model and its config (shipped alongside this card)
model = joblib.load('model.joblib')
config = json.load(open('config.json'))
features = config['features']

# "data.csv" is a placeholder path for your own rows to score
data = pd.read_csv("data.csv")
data = data[features]
data.columns = ["feat_" + str(col) for col in data.columns]
predictions = model.predict(data)  # or model.predict_proba(data)
```
Berzemu/Coco
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- language: en license: apache-2.0 library_name: diffusers tags: [] datasets: pcam-96 metrics: [] --- <!-- This model card has been generated automatically according to the information the training script had access to. You should probably proofread and complete it, then remove this comment. --> # ddpm-pcam-96-flip ## Model description This diffusion model is trained with the [🤗 Diffusers](https://github.com/huggingface/diffusers) library on the `pcam-96` dataset. ## Intended uses & limitations #### How to use A minimal sampling sketch (the repo id is taken from the TensorBoard link below):

```python
from diffusers import DDPMPipeline

# Load the unconditional DDPM pipeline from the Hub and sample one image
pipeline = DDPMPipeline.from_pretrained('ankile/ddpm-pcam-96-flip')
image = pipeline().images[0]
image
```

#### Limitations and bias [TODO: provide examples of latent issues and potential remediations] ## Training data [TODO: describe the data used to train the model] ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 48 - eval_batch_size: 48 - gradient_accumulation_steps: 1 - optimizer: AdamW with betas=(None, None), weight_decay=None and epsilon=None - lr_scheduler: None - lr_warmup_steps: 500 - ema_inv_gamma: None - ema_inv_gamma: None - ema_inv_gamma: None - mixed_precision: fp16 ### Training results 📈 [TensorBoard logs](https://huggingface.co/ankile/ddpm-pcam-96-flip/tensorboard?#scalars)
Bharathdamu/wav2vec2-large-xls-r-300m-hindi2-colab
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bharathdamu/wav2vec2-large-xls-r-300m-hindi3-colab
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bharathdamu/wav2vec2-model-hindi-stt
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bhumika/roberta-base-finetuned-sst2
[ "pytorch", "tensorboard", "roberta", "text-classification", "dataset:glue", "transformers", "generated_from_trainer", "model-index" ]
text-classification
{ "architectures": [ "RobertaForSequenceClassification" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
85
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bhuvana/t5-base-spellchecker
[ "pytorch", "t5", "text2text-generation", "transformers", "autotrain_compatible" ]
text2text-generation
{ "architectures": [ "T5ForConditionalGeneration" ], "model_type": "t5", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": true, "length_penalty": 2, "max_length": 200, "min_length": 30, "no_repeat_ngram_size": 3, "num_beams": 4, "prefix": "summarize: " }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to German: " }, "translation_en_to_fr": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to French: " }, "translation_en_to_ro": { "early_stopping": true, "max_length": 300, "num_beams": 4, "prefix": "translate English to Romanian: " } } }
93
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bia18/Beatriz
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: mit --- ### Pokemon modern artwork on Stable Diffusion Pokémon modern artwork up to Hisui concept (re-scaled to max width and height 512 px) Includes mega-evolutions, gigamax, regional and alternate forms. Unown variants are excluded, as well as Arceus/Silvally recolours (to avoid same-species overrepresentation) This is the `<pkmn-modern>` concept taught to Stable Diffusion via Textual Inversion. You can load this concept into the [Stable Conceptualizer](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/stable_conceptualizer_inference.ipynb) notebook. You can also train your own concepts and load them into the concept libraries using [this notebook](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_textual_inversion_training.ipynb). Here is the new concept you will be able to use as a `style`: ![<pkmn-modern> 0](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/0.jpeg) ![<pkmn-modern> 1](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1.jpeg) ![<pkmn-modern> 2](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/2.jpeg) ![<pkmn-modern> 3](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/3.jpeg) ![<pkmn-modern> 4](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/4.jpeg) ![<pkmn-modern> 5](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/5.jpeg) ![<pkmn-modern> 6](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/6.jpeg) ![<pkmn-modern> 7](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/7.jpeg) ![<pkmn-modern> 8](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/8.jpeg) ![<pkmn-modern> 9](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/9.jpeg) ![<pkmn-modern> 10](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/10.jpeg) ![<pkmn-modern> 11](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/11.jpeg) ![<pkmn-modern> 12](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/12.jpeg) ![<pkmn-modern> 13](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/13.jpeg) ![<pkmn-modern> 14](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/14.jpeg) ![<pkmn-modern> 15](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/15.jpeg) ![<pkmn-modern> 16](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/16.jpeg) ![<pkmn-modern> 17](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/17.jpeg) ![<pkmn-modern> 18](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/18.jpeg) ![<pkmn-modern> 19](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/19.jpeg) ![<pkmn-modern> 20](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/20.jpeg) ![<pkmn-modern> 
21](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/21.jpeg) ![<pkmn-modern> 22](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/22.jpeg) ![<pkmn-modern> 23](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/23.jpeg) ![<pkmn-modern> 24](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/24.jpeg) ![<pkmn-modern> 25](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/25.jpeg) ![<pkmn-modern> 26](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/26.jpeg) ![<pkmn-modern> 27](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/27.jpeg) ![<pkmn-modern> 28](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/28.jpeg) ![<pkmn-modern> 29](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/29.jpeg) ![<pkmn-modern> 30](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/30.jpeg) ![<pkmn-modern> 31](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/31.jpeg) ![<pkmn-modern> 32](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/32.jpeg) ![<pkmn-modern> 33](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/33.jpeg) ![<pkmn-modern> 34](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/34.jpeg) ![<pkmn-modern> 35](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/35.jpeg) ![<pkmn-modern> 36](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/36.jpeg) ![<pkmn-modern> 37](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/37.jpeg) ![<pkmn-modern> 38](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/38.jpeg) ![<pkmn-modern> 39](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/39.jpeg) ![<pkmn-modern> 40](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/40.jpeg) ![<pkmn-modern> 41](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/41.jpeg) ![<pkmn-modern> 42](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/42.jpeg) ![<pkmn-modern> 43](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/43.jpeg) ![<pkmn-modern> 44](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/44.jpeg) ![<pkmn-modern> 45](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/45.jpeg) ![<pkmn-modern> 46](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/46.jpeg) ![<pkmn-modern> 47](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/47.jpeg) ![<pkmn-modern> 48](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/48.jpeg) ![<pkmn-modern> 49](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/49.jpeg) 
![<pkmn-modern> 50](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/50.jpeg) ![<pkmn-modern> 51](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/51.jpeg) ![<pkmn-modern> 52](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/52.jpeg) ![<pkmn-modern> 53](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/53.jpeg) ![<pkmn-modern> 54](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/54.jpeg) ![<pkmn-modern> 55](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/55.jpeg) ![<pkmn-modern> 56](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/56.jpeg) ![<pkmn-modern> 57](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/57.jpeg) ![<pkmn-modern> 58](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/58.jpeg) ![<pkmn-modern> 59](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/59.jpeg) ![<pkmn-modern> 60](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/60.jpeg) ![<pkmn-modern> 61](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/61.jpeg) ![<pkmn-modern> 62](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/62.jpeg) ![<pkmn-modern> 63](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/63.jpeg) ![<pkmn-modern> 64](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/64.jpeg) ![<pkmn-modern> 65](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/65.jpeg) ![<pkmn-modern> 66](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/66.jpeg) ![<pkmn-modern> 67](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/67.jpeg) ![<pkmn-modern> 68](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/68.jpeg) ![<pkmn-modern> 69](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/69.jpeg) ![<pkmn-modern> 70](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/70.jpeg) ![<pkmn-modern> 71](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/71.jpeg) ![<pkmn-modern> 72](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/72.jpeg) ![<pkmn-modern> 73](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/73.jpeg) ![<pkmn-modern> 74](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/74.jpeg) ![<pkmn-modern> 75](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/75.jpeg) ![<pkmn-modern> 76](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/76.jpeg) ![<pkmn-modern> 77](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/77.jpeg) ![<pkmn-modern> 
78](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/78.jpeg) ![<pkmn-modern> 79](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/79.jpeg) ![<pkmn-modern> 80](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/80.jpeg) ![<pkmn-modern> 81](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/81.jpeg) ![<pkmn-modern> 82](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/82.jpeg) ![<pkmn-modern> 83](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/83.jpeg) ![<pkmn-modern> 84](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/84.jpeg) ![<pkmn-modern> 85](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/85.jpeg) ![<pkmn-modern> 86](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/86.jpeg) ![<pkmn-modern> 87](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/87.jpeg) ![<pkmn-modern> 88](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/88.jpeg) ![<pkmn-modern> 89](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/89.jpeg) ![<pkmn-modern> 90](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/90.jpeg) ![<pkmn-modern> 91](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/91.jpeg) ![<pkmn-modern> 92](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/92.jpeg) ![<pkmn-modern> 93](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/93.jpeg) ![<pkmn-modern> 94](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/94.jpeg) ![<pkmn-modern> 95](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/95.jpeg) ![<pkmn-modern> 96](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/96.jpeg) ![<pkmn-modern> 97](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/97.jpeg) ![<pkmn-modern> 98](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/98.jpeg) ![<pkmn-modern> 99](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/99.jpeg) ![<pkmn-modern> 100](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/100.jpeg) ![<pkmn-modern> 101](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/101.jpeg) ![<pkmn-modern> 102](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/102.jpeg) ![<pkmn-modern> 103](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/103.jpeg) ![<pkmn-modern> 104](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/104.jpeg) ![<pkmn-modern> 105](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/105.jpeg) ![<pkmn-modern> 
106](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/106.jpeg) ![<pkmn-modern> 107](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/107.jpeg) ![<pkmn-modern> 108](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/108.jpeg) ![<pkmn-modern> 109](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/109.jpeg) ![<pkmn-modern> 110](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/110.jpeg) ![<pkmn-modern> 111](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/111.jpeg) ![<pkmn-modern> 112](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/112.jpeg) ![<pkmn-modern> 113](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/113.jpeg) ![<pkmn-modern> 114](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/114.jpeg) ![<pkmn-modern> 115](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/115.jpeg) ![<pkmn-modern> 116](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/116.jpeg) ![<pkmn-modern> 117](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/117.jpeg) ![<pkmn-modern> 118](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/118.jpeg) ![<pkmn-modern> 119](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/119.jpeg) ![<pkmn-modern> 120](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/120.jpeg) ![<pkmn-modern> 121](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/121.jpeg) ![<pkmn-modern> 122](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/122.jpeg) ![<pkmn-modern> 123](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/123.jpeg) ![<pkmn-modern> 124](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/124.jpeg) ![<pkmn-modern> 125](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/125.jpeg) ![<pkmn-modern> 126](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/126.jpeg) ![<pkmn-modern> 127](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/127.jpeg) ![<pkmn-modern> 128](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/128.jpeg) ![<pkmn-modern> 129](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/129.jpeg) ![<pkmn-modern> 130](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/130.jpeg) ![<pkmn-modern> 131](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/131.jpeg) ![<pkmn-modern> 132](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/132.jpeg) ![<pkmn-modern> 133](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/133.jpeg) ![<pkmn-modern> 
134](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/134.jpeg) ![<pkmn-modern> 135](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/135.jpeg) ![<pkmn-modern> 136](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/136.jpeg) ![<pkmn-modern> 137](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/137.jpeg) ![<pkmn-modern> 138](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/138.jpeg) ![<pkmn-modern> 139](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/139.jpeg) ![<pkmn-modern> 140](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/140.jpeg) ![<pkmn-modern> 141](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/141.jpeg) ![<pkmn-modern> 142](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/142.jpeg) ![<pkmn-modern> 143](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/143.jpeg) ![<pkmn-modern> 144](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/144.jpeg) ![<pkmn-modern> 145](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/145.jpeg) ![<pkmn-modern> 146](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/146.jpeg) ![<pkmn-modern> 147](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/147.jpeg) ![<pkmn-modern> 148](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/148.jpeg) ![<pkmn-modern> 149](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/149.jpeg) ![<pkmn-modern> 150](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/150.jpeg) ![<pkmn-modern> 151](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/151.jpeg) ![<pkmn-modern> 152](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/152.jpeg) ![<pkmn-modern> 153](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/153.jpeg) ![<pkmn-modern> 154](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/154.jpeg) ![<pkmn-modern> 155](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/155.jpeg) ![<pkmn-modern> 156](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/156.jpeg) ![<pkmn-modern> 157](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/157.jpeg) ![<pkmn-modern> 158](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/158.jpeg) ![<pkmn-modern> 159](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/159.jpeg) ![<pkmn-modern> 160](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/160.jpeg) ![<pkmn-modern> 161](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/161.jpeg) ![<pkmn-modern> 
162](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/162.jpeg) ![<pkmn-modern> 163](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/163.jpeg) ![<pkmn-modern> 164](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/164.jpeg) ![<pkmn-modern> 165](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/165.jpeg) ![<pkmn-modern> 166](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/166.jpeg) ![<pkmn-modern> 167](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/167.jpeg) ![<pkmn-modern> 168](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/168.jpeg) ![<pkmn-modern> 169](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/169.jpeg) ![<pkmn-modern> 170](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/170.jpeg) ![<pkmn-modern> 171](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/171.jpeg) ![<pkmn-modern> 172](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/172.jpeg) ![<pkmn-modern> 173](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/173.jpeg) ![<pkmn-modern> 174](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/174.jpeg) ![<pkmn-modern> 175](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/175.jpeg) ![<pkmn-modern> 176](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/176.jpeg) ![<pkmn-modern> 177](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/177.jpeg) ![<pkmn-modern> 178](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/178.jpeg) ![<pkmn-modern> 179](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/179.jpeg) ![<pkmn-modern> 180](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/180.jpeg) ![<pkmn-modern> 181](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/181.jpeg) ![<pkmn-modern> 182](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/182.jpeg) ![<pkmn-modern> 183](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/183.jpeg) ![<pkmn-modern> 184](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/184.jpeg) ![<pkmn-modern> 185](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/185.jpeg) ![<pkmn-modern> 186](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/186.jpeg) ![<pkmn-modern> 187](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/187.jpeg) ![<pkmn-modern> 188](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/188.jpeg) ![<pkmn-modern> 189](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/189.jpeg) ![<pkmn-modern> 
190](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/190.jpeg) ![<pkmn-modern> 191](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/191.jpeg) ![<pkmn-modern> 192](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/192.jpeg) ![<pkmn-modern> 193](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/193.jpeg) ![<pkmn-modern> 194](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/194.jpeg) ![<pkmn-modern> 195](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/195.jpeg) ![<pkmn-modern> 196](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/196.jpeg) ![<pkmn-modern> 197](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/197.jpeg) ![<pkmn-modern> 198](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/198.jpeg) ![<pkmn-modern> 199](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/199.jpeg) ![<pkmn-modern> 200](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/200.jpeg) ![<pkmn-modern> 201](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/201.jpeg) ![<pkmn-modern> 202](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/202.jpeg) ![<pkmn-modern> 203](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/203.jpeg) ![<pkmn-modern> 204](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/204.jpeg) ![<pkmn-modern> 205](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/205.jpeg) ![<pkmn-modern> 206](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/206.jpeg) ![<pkmn-modern> 207](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/207.jpeg) ![<pkmn-modern> 208](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/208.jpeg) ![<pkmn-modern> 209](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/209.jpeg) ![<pkmn-modern> 210](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/210.jpeg) ![<pkmn-modern> 211](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/211.jpeg) ![<pkmn-modern> 212](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/212.jpeg) ![<pkmn-modern> 213](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/213.jpeg) ![<pkmn-modern> 214](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/214.jpeg) ![<pkmn-modern> 215](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/215.jpeg) ![<pkmn-modern> 216](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/216.jpeg) ![<pkmn-modern> 217](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/217.jpeg) ![<pkmn-modern> 
218](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/218.jpeg) ![<pkmn-modern> 219](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/219.jpeg) ![<pkmn-modern> 220](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/220.jpeg) ![<pkmn-modern> 221](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/221.jpeg) ![<pkmn-modern> 222](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/222.jpeg) ![<pkmn-modern> 223](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/223.jpeg) ![<pkmn-modern> 224](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/224.jpeg) ![<pkmn-modern> 225](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/225.jpeg) ![<pkmn-modern> 226](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/226.jpeg) ![<pkmn-modern> 227](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/227.jpeg) ![<pkmn-modern> 228](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/228.jpeg) ![<pkmn-modern> 229](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/229.jpeg) ![<pkmn-modern> 230](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/230.jpeg) ![<pkmn-modern> 231](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/231.jpeg) ![<pkmn-modern> 232](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/232.jpeg) ![<pkmn-modern> 233](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/233.jpeg) ![<pkmn-modern> 234](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/234.jpeg) ![<pkmn-modern> 235](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/235.jpeg) ![<pkmn-modern> 236](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/236.jpeg) ![<pkmn-modern> 237](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/237.jpeg) ![<pkmn-modern> 238](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/238.jpeg) ![<pkmn-modern> 239](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/239.jpeg) ![<pkmn-modern> 240](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/240.jpeg) ![<pkmn-modern> 241](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/241.jpeg) ![<pkmn-modern> 242](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/242.jpeg) ![<pkmn-modern> 243](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/243.jpeg) ![<pkmn-modern> 244](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/244.jpeg) ![<pkmn-modern> 245](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/245.jpeg) ![<pkmn-modern> 
246](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/246.jpeg) ![<pkmn-modern> 247](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/247.jpeg) ![<pkmn-modern> 248](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/248.jpeg) ![<pkmn-modern> 249](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/249.jpeg) ![<pkmn-modern> 250](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/250.jpeg) ![<pkmn-modern> 251](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/251.jpeg) ![<pkmn-modern> 252](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/252.jpeg) ![<pkmn-modern> 253](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/253.jpeg) ![<pkmn-modern> 254](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/254.jpeg) ![<pkmn-modern> 255](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/255.jpeg) ![<pkmn-modern> 256](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/256.jpeg) ![<pkmn-modern> 257](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/257.jpeg) ![<pkmn-modern> 258](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/258.jpeg) ![<pkmn-modern> 259](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/259.jpeg) ![<pkmn-modern> 260](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/260.jpeg) ![<pkmn-modern> 261](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/261.jpeg) ![<pkmn-modern> 262](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/262.jpeg) ![<pkmn-modern> 263](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/263.jpeg) ![<pkmn-modern> 264](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/264.jpeg) ![<pkmn-modern> 265](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/265.jpeg) ![<pkmn-modern> 266](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/266.jpeg) ![<pkmn-modern> 267](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/267.jpeg) ![<pkmn-modern> 268](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/268.jpeg) ![<pkmn-modern> 269](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/269.jpeg) ![<pkmn-modern> 270](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/270.jpeg) ![<pkmn-modern> 271](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/271.jpeg) ![<pkmn-modern> 272](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/272.jpeg) ![<pkmn-modern> 273](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/273.jpeg) ![<pkmn-modern> 
274](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/274.jpeg) ![<pkmn-modern> 275](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/275.jpeg) ![<pkmn-modern> 276](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/276.jpeg) ![<pkmn-modern> 277](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/277.jpeg) ![<pkmn-modern> 278](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/278.jpeg) ![<pkmn-modern> 279](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/279.jpeg) ![<pkmn-modern> 280](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/280.jpeg) ![<pkmn-modern> 281](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/281.jpeg) ![<pkmn-modern> 282](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/282.jpeg) ![<pkmn-modern> 283](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/283.jpeg) ![<pkmn-modern> 284](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/284.jpeg) ![<pkmn-modern> 285](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/285.jpeg) ![<pkmn-modern> 286](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/286.jpeg) ![<pkmn-modern> 287](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/287.jpeg) ![<pkmn-modern> 288](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/288.jpeg) ![<pkmn-modern> 289](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/289.jpeg) ![<pkmn-modern> 290](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/290.jpeg) ![<pkmn-modern> 291](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/291.jpeg) ![<pkmn-modern> 292](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/292.jpeg) ![<pkmn-modern> 293](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/293.jpeg) ![<pkmn-modern> 294](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/294.jpeg) ![<pkmn-modern> 295](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/295.jpeg) ![<pkmn-modern> 296](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/296.jpeg) ![<pkmn-modern> 297](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/297.jpeg) ![<pkmn-modern> 298](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/298.jpeg) ![<pkmn-modern> 299](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/299.jpeg) ![<pkmn-modern> 300](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/300.jpeg) ![<pkmn-modern> 301](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/301.jpeg) ![<pkmn-modern> 
302](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/302.jpeg) ![<pkmn-modern> 303](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/303.jpeg) ![<pkmn-modern> 304](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/304.jpeg) ![<pkmn-modern> 305](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/305.jpeg) ![<pkmn-modern> 306](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/306.jpeg) ![<pkmn-modern> 307](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/307.jpeg) ![<pkmn-modern> 308](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/308.jpeg) ![<pkmn-modern> 309](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/309.jpeg) ![<pkmn-modern> 310](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/310.jpeg) ![<pkmn-modern> 311](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/311.jpeg) ![<pkmn-modern> 312](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/312.jpeg) ![<pkmn-modern> 313](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/313.jpeg) ![<pkmn-modern> 314](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/314.jpeg) ![<pkmn-modern> 315](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/315.jpeg) ![<pkmn-modern> 316](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/316.jpeg) ![<pkmn-modern> 317](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/317.jpeg) ![<pkmn-modern> 318](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/318.jpeg) ![<pkmn-modern> 319](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/319.jpeg) ![<pkmn-modern> 320](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/320.jpeg) ![<pkmn-modern> 321](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/321.jpeg) ![<pkmn-modern> 322](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/322.jpeg) ![<pkmn-modern> 323](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/323.jpeg) ![<pkmn-modern> 324](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/324.jpeg) ![<pkmn-modern> 325](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/325.jpeg) ![<pkmn-modern> 326](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/326.jpeg) ![<pkmn-modern> 327](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/327.jpeg) ![<pkmn-modern> 328](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/328.jpeg) ![<pkmn-modern> 329](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/329.jpeg) ![<pkmn-modern> 
330](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/330.jpeg) ![<pkmn-modern> 331](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/331.jpeg) ![<pkmn-modern> 332](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/332.jpeg) ![<pkmn-modern> 333](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/333.jpeg) ![<pkmn-modern> 334](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/334.jpeg) ![<pkmn-modern> 335](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/335.jpeg) ![<pkmn-modern> 336](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/336.jpeg) ![<pkmn-modern> 337](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/337.jpeg) ![<pkmn-modern> 338](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/338.jpeg) ![<pkmn-modern> 339](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/339.jpeg) ![<pkmn-modern> 340](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/340.jpeg) ![<pkmn-modern> 341](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/341.jpeg) ![<pkmn-modern> 342](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/342.jpeg) ![<pkmn-modern> 343](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/343.jpeg) ![<pkmn-modern> 344](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/344.jpeg) ![<pkmn-modern> 345](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/345.jpeg) ![<pkmn-modern> 346](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/346.jpeg) ![<pkmn-modern> 347](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/347.jpeg) ![<pkmn-modern> 348](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/348.jpeg) ![<pkmn-modern> 349](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/349.jpeg) ![<pkmn-modern> 350](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/350.jpeg) ![<pkmn-modern> 351](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/351.jpeg) ![<pkmn-modern> 352](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/352.jpeg) ![<pkmn-modern> 353](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/353.jpeg) ![<pkmn-modern> 354](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/354.jpeg) ![<pkmn-modern> 355](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/355.jpeg) ![<pkmn-modern> 356](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/356.jpeg) ![<pkmn-modern> 357](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/357.jpeg) ![<pkmn-modern> 
358](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/358.jpeg) ![<pkmn-modern> 359](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/359.jpeg) ![<pkmn-modern> 360](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/360.jpeg) ![<pkmn-modern> 361](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/361.jpeg) ![<pkmn-modern> 362](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/362.jpeg) ![<pkmn-modern> 363](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/363.jpeg) ![<pkmn-modern> 364](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/364.jpeg) ![<pkmn-modern> 365](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/365.jpeg) ![<pkmn-modern> 366](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/366.jpeg) ![<pkmn-modern> 367](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/367.jpeg) ![<pkmn-modern> 368](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/368.jpeg) ![<pkmn-modern> 369](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/369.jpeg) ![<pkmn-modern> 370](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/370.jpeg) ![<pkmn-modern> 371](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/371.jpeg) ![<pkmn-modern> 372](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/372.jpeg) ![<pkmn-modern> 373](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/373.jpeg) ![<pkmn-modern> 374](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/374.jpeg) ![<pkmn-modern> 375](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/375.jpeg) ![<pkmn-modern> 376](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/376.jpeg) ![<pkmn-modern> 377](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/377.jpeg) ![<pkmn-modern> 378](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/378.jpeg) ![<pkmn-modern> 379](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/379.jpeg) ![<pkmn-modern> 380](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/380.jpeg) ![<pkmn-modern> 381](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/381.jpeg) ![<pkmn-modern> 382](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/382.jpeg) ![<pkmn-modern> 383](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/383.jpeg) ![<pkmn-modern> 384](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/384.jpeg) ![<pkmn-modern> 385](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/385.jpeg) ![<pkmn-modern> 
386](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/386.jpeg) ![<pkmn-modern> 387](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/387.jpeg) ![<pkmn-modern> 388](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/388.jpeg) ![<pkmn-modern> 389](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/389.jpeg) ![<pkmn-modern> 390](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/390.jpeg) ![<pkmn-modern> 391](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/391.jpeg) ![<pkmn-modern> 392](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/392.jpeg) ![<pkmn-modern> 393](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/393.jpeg) ![<pkmn-modern> 394](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/394.jpeg) ![<pkmn-modern> 395](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/395.jpeg) ![<pkmn-modern> 396](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/396.jpeg) ![<pkmn-modern> 397](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/397.jpeg) ![<pkmn-modern> 398](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/398.jpeg) ![<pkmn-modern> 399](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/399.jpeg) ![<pkmn-modern> 400](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/400.jpeg) ![<pkmn-modern> 401](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/401.jpeg) ![<pkmn-modern> 402](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/402.jpeg) ![<pkmn-modern> 403](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/403.jpeg) ![<pkmn-modern> 404](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/404.jpeg) ![<pkmn-modern> 405](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/405.jpeg) ![<pkmn-modern> 406](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/406.jpeg) ![<pkmn-modern> 407](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/407.jpeg) ![<pkmn-modern> 408](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/408.jpeg) ![<pkmn-modern> 409](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/409.jpeg) ![<pkmn-modern> 410](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/410.jpeg) ![<pkmn-modern> 411](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/411.jpeg) ![<pkmn-modern> 412](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/412.jpeg) ![<pkmn-modern> 413](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/413.jpeg) ![<pkmn-modern> 
414](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/414.jpeg) ![<pkmn-modern> 415](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/415.jpeg) ![<pkmn-modern> 416](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/416.jpeg) ![<pkmn-modern> 417](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/417.jpeg) ![<pkmn-modern> 418](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/418.jpeg) ![<pkmn-modern> 419](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/419.jpeg) ![<pkmn-modern> 420](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/420.jpeg) ![<pkmn-modern> 421](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/421.jpeg) ![<pkmn-modern> 422](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/422.jpeg) ![<pkmn-modern> 423](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/423.jpeg) ![<pkmn-modern> 424](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/424.jpeg) ![<pkmn-modern> 425](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/425.jpeg) ![<pkmn-modern> 426](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/426.jpeg) ![<pkmn-modern> 427](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/427.jpeg) ![<pkmn-modern> 428](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/428.jpeg) ![<pkmn-modern> 429](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/429.jpeg) ![<pkmn-modern> 430](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/430.jpeg) ![<pkmn-modern> 431](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/431.jpeg) ![<pkmn-modern> 432](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/432.jpeg) ![<pkmn-modern> 433](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/433.jpeg) ![<pkmn-modern> 434](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/434.jpeg) ![<pkmn-modern> 435](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/435.jpeg) ![<pkmn-modern> 436](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/436.jpeg) ![<pkmn-modern> 437](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/437.jpeg) ![<pkmn-modern> 438](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/438.jpeg) ![<pkmn-modern> 439](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/439.jpeg) ![<pkmn-modern> 440](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/440.jpeg) ![<pkmn-modern> 441](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/441.jpeg) ![<pkmn-modern> 
442](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/442.jpeg) ![<pkmn-modern> 443](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/443.jpeg) ![<pkmn-modern> 444](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/444.jpeg) ![<pkmn-modern> 445](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/445.jpeg) ![<pkmn-modern> 446](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/446.jpeg) ![<pkmn-modern> 447](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/447.jpeg) ![<pkmn-modern> 448](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/448.jpeg) ![<pkmn-modern> 449](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/449.jpeg) ![<pkmn-modern> 450](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/450.jpeg) ![<pkmn-modern> 451](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/451.jpeg) ![<pkmn-modern> 452](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/452.jpeg) ![<pkmn-modern> 453](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/453.jpeg) ![<pkmn-modern> 454](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/454.jpeg) ![<pkmn-modern> 455](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/455.jpeg) ![<pkmn-modern> 456](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/456.jpeg) ![<pkmn-modern> 457](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/457.jpeg) ![<pkmn-modern> 458](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/458.jpeg) ![<pkmn-modern> 459](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/459.jpeg) ![<pkmn-modern> 460](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/460.jpeg) ![<pkmn-modern> 461](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/461.jpeg) ![<pkmn-modern> 462](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/462.jpeg) ![<pkmn-modern> 463](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/463.jpeg) ![<pkmn-modern> 464](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/464.jpeg) ![<pkmn-modern> 465](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/465.jpeg) ![<pkmn-modern> 466](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/466.jpeg) ![<pkmn-modern> 467](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/467.jpeg) ![<pkmn-modern> 468](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/468.jpeg) ![<pkmn-modern> 469](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/469.jpeg) ![<pkmn-modern> 
470](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/470.jpeg) ![<pkmn-modern> 471](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/471.jpeg) ![<pkmn-modern> 472](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/472.jpeg) ![<pkmn-modern> 473](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/473.jpeg) ![<pkmn-modern> 474](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/474.jpeg) ![<pkmn-modern> 475](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/475.jpeg) ![<pkmn-modern> 476](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/476.jpeg) ![<pkmn-modern> 477](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/477.jpeg) ![<pkmn-modern> 478](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/478.jpeg) ![<pkmn-modern> 479](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/479.jpeg) ![<pkmn-modern> 480](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/480.jpeg) ![<pkmn-modern> 481](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/481.jpeg) ![<pkmn-modern> 482](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/482.jpeg) ![<pkmn-modern> 483](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/483.jpeg) ![<pkmn-modern> 484](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/484.jpeg) ![<pkmn-modern> 485](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/485.jpeg) ![<pkmn-modern> 486](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/486.jpeg) ![<pkmn-modern> 487](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/487.jpeg) ![<pkmn-modern> 488](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/488.jpeg) ![<pkmn-modern> 489](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/489.jpeg) ![<pkmn-modern> 490](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/490.jpeg) ![<pkmn-modern> 491](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/491.jpeg) ![<pkmn-modern> 492](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/492.jpeg) ![<pkmn-modern> 493](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/493.jpeg) ![<pkmn-modern> 494](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/494.jpeg) ![<pkmn-modern> 495](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/495.jpeg) ![<pkmn-modern> 496](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/496.jpeg) ![<pkmn-modern> 497](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/497.jpeg) ![<pkmn-modern> 
498](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/498.jpeg) ![<pkmn-modern> 499](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/499.jpeg) ![<pkmn-modern> 500](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/500.jpeg) ![<pkmn-modern> 501](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/501.jpeg) ![<pkmn-modern> 502](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/502.jpeg) ![<pkmn-modern> 503](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/503.jpeg) ![<pkmn-modern> 504](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/504.jpeg) ![<pkmn-modern> 505](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/505.jpeg) ![<pkmn-modern> 506](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/506.jpeg) ![<pkmn-modern> 507](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/507.jpeg) ![<pkmn-modern> 508](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/508.jpeg) ![<pkmn-modern> 509](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/509.jpeg) ![<pkmn-modern> 510](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/510.jpeg) ![<pkmn-modern> 511](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/511.jpeg) ![<pkmn-modern> 512](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/512.jpeg) ![<pkmn-modern> 513](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/513.jpeg) ![<pkmn-modern> 514](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/514.jpeg) ![<pkmn-modern> 515](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/515.jpeg) ![<pkmn-modern> 516](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/516.jpeg) ![<pkmn-modern> 517](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/517.jpeg) ![<pkmn-modern> 518](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/518.jpeg) ![<pkmn-modern> 519](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/519.jpeg) ![<pkmn-modern> 520](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/520.jpeg) ![<pkmn-modern> 521](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/521.jpeg) ![<pkmn-modern> 522](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/522.jpeg) ![<pkmn-modern> 523](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/523.jpeg) ![<pkmn-modern> 524](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/524.jpeg) ![<pkmn-modern> 525](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/525.jpeg) ![<pkmn-modern> 
526](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/526.jpeg) ![<pkmn-modern> 527](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/527.jpeg) ![<pkmn-modern> 528](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/528.jpeg) ![<pkmn-modern> 529](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/529.jpeg) ![<pkmn-modern> 530](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/530.jpeg) ![<pkmn-modern> 531](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/531.jpeg) ![<pkmn-modern> 532](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/532.jpeg) ![<pkmn-modern> 533](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/533.jpeg) ![<pkmn-modern> 534](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/534.jpeg) ![<pkmn-modern> 535](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/535.jpeg) ![<pkmn-modern> 536](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/536.jpeg) ![<pkmn-modern> 537](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/537.jpeg) ![<pkmn-modern> 538](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/538.jpeg) ![<pkmn-modern> 539](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/539.jpeg) ![<pkmn-modern> 540](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/540.jpeg) ![<pkmn-modern> 541](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/541.jpeg) ![<pkmn-modern> 542](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/542.jpeg) ![<pkmn-modern> 543](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/543.jpeg) ![<pkmn-modern> 544](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/544.jpeg) ![<pkmn-modern> 545](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/545.jpeg) ![<pkmn-modern> 546](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/546.jpeg) ![<pkmn-modern> 547](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/547.jpeg) ![<pkmn-modern> 548](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/548.jpeg) ![<pkmn-modern> 549](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/549.jpeg) ![<pkmn-modern> 550](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/550.jpeg) ![<pkmn-modern> 551](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/551.jpeg) ![<pkmn-modern> 552](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/552.jpeg) ![<pkmn-modern> 553](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/553.jpeg) ![<pkmn-modern> 
554](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/554.jpeg) ![<pkmn-modern> 555](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/555.jpeg) ![<pkmn-modern> 556](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/556.jpeg) ![<pkmn-modern> 557](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/557.jpeg) ![<pkmn-modern> 558](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/558.jpeg) ![<pkmn-modern> 559](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/559.jpeg) ![<pkmn-modern> 560](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/560.jpeg) ![<pkmn-modern> 561](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/561.jpeg) ![<pkmn-modern> 562](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/562.jpeg) ![<pkmn-modern> 563](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/563.jpeg) ![<pkmn-modern> 564](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/564.jpeg) ![<pkmn-modern> 565](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/565.jpeg) ![<pkmn-modern> 566](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/566.jpeg) ![<pkmn-modern> 567](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/567.jpeg) ![<pkmn-modern> 568](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/568.jpeg) ![<pkmn-modern> 569](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/569.jpeg) ![<pkmn-modern> 570](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/570.jpeg) ![<pkmn-modern> 571](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/571.jpeg) ![<pkmn-modern> 572](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/572.jpeg) ![<pkmn-modern> 573](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/573.jpeg) ![<pkmn-modern> 574](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/574.jpeg) ![<pkmn-modern> 575](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/575.jpeg) ![<pkmn-modern> 576](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/576.jpeg) ![<pkmn-modern> 577](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/577.jpeg) ![<pkmn-modern> 578](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/578.jpeg) ![<pkmn-modern> 579](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/579.jpeg) ![<pkmn-modern> 580](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/580.jpeg) ![<pkmn-modern> 581](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/581.jpeg) ![<pkmn-modern> 
582](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/582.jpeg) ![<pkmn-modern> 583](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/583.jpeg) ![<pkmn-modern> 584](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/584.jpeg) ![<pkmn-modern> 585](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/585.jpeg) ![<pkmn-modern> 586](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/586.jpeg) ![<pkmn-modern> 587](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/587.jpeg) ![<pkmn-modern> 588](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/588.jpeg) ![<pkmn-modern> 589](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/589.jpeg) ![<pkmn-modern> 590](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/590.jpeg) ![<pkmn-modern> 591](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/591.jpeg) ![<pkmn-modern> 592](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/592.jpeg) ![<pkmn-modern> 593](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/593.jpeg) ![<pkmn-modern> 594](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/594.jpeg) ![<pkmn-modern> 595](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/595.jpeg) ![<pkmn-modern> 596](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/596.jpeg) ![<pkmn-modern> 597](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/597.jpeg) ![<pkmn-modern> 598](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/598.jpeg) ![<pkmn-modern> 599](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/599.jpeg) ![<pkmn-modern> 600](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/600.jpeg) ![<pkmn-modern> 601](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/601.jpeg) ![<pkmn-modern> 602](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/602.jpeg) ![<pkmn-modern> 603](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/603.jpeg) ![<pkmn-modern> 604](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/604.jpeg) ![<pkmn-modern> 605](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/605.jpeg) ![<pkmn-modern> 606](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/606.jpeg) ![<pkmn-modern> 607](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/607.jpeg) ![<pkmn-modern> 608](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/608.jpeg) ![<pkmn-modern> 609](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/609.jpeg) ![<pkmn-modern> 
610](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/610.jpeg) ![<pkmn-modern> 611](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/611.jpeg) ![<pkmn-modern> 612](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/612.jpeg) ![<pkmn-modern> 613](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/613.jpeg) ![<pkmn-modern> 614](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/614.jpeg) ![<pkmn-modern> 615](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/615.jpeg) ![<pkmn-modern> 616](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/616.jpeg) ![<pkmn-modern> 617](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/617.jpeg) ![<pkmn-modern> 618](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/618.jpeg) ![<pkmn-modern> 619](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/619.jpeg) ![<pkmn-modern> 620](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/620.jpeg) ![<pkmn-modern> 621](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/621.jpeg) ![<pkmn-modern> 622](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/622.jpeg) ![<pkmn-modern> 623](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/623.jpeg) ![<pkmn-modern> 624](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/624.jpeg) ![<pkmn-modern> 625](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/625.jpeg) ![<pkmn-modern> 626](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/626.jpeg) ![<pkmn-modern> 627](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/627.jpeg) ![<pkmn-modern> 628](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/628.jpeg) ![<pkmn-modern> 629](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/629.jpeg) ![<pkmn-modern> 630](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/630.jpeg) ![<pkmn-modern> 631](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/631.jpeg) ![<pkmn-modern> 632](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/632.jpeg) ![<pkmn-modern> 633](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/633.jpeg) ![<pkmn-modern> 634](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/634.jpeg) ![<pkmn-modern> 635](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/635.jpeg) ![<pkmn-modern> 636](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/636.jpeg) ![<pkmn-modern> 637](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/637.jpeg) ![<pkmn-modern> 
638](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/638.jpeg) ![<pkmn-modern> 639](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/639.jpeg) ![<pkmn-modern> 640](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/640.jpeg) ![<pkmn-modern> 641](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/641.jpeg) ![<pkmn-modern> 642](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/642.jpeg) ![<pkmn-modern> 643](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/643.jpeg) ![<pkmn-modern> 644](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/644.jpeg) ![<pkmn-modern> 645](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/645.jpeg) ![<pkmn-modern> 646](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/646.jpeg) ![<pkmn-modern> 647](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/647.jpeg) ![<pkmn-modern> 648](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/648.jpeg) ![<pkmn-modern> 649](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/649.jpeg) ![<pkmn-modern> 650](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/650.jpeg) ![<pkmn-modern> 651](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/651.jpeg) ![<pkmn-modern> 652](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/652.jpeg) ![<pkmn-modern> 653](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/653.jpeg) ![<pkmn-modern> 654](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/654.jpeg) ![<pkmn-modern> 655](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/655.jpeg) ![<pkmn-modern> 656](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/656.jpeg) ![<pkmn-modern> 657](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/657.jpeg) ![<pkmn-modern> 658](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/658.jpeg) ![<pkmn-modern> 659](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/659.jpeg) ![<pkmn-modern> 660](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/660.jpeg) ![<pkmn-modern> 661](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/661.jpeg) ![<pkmn-modern> 662](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/662.jpeg) ![<pkmn-modern> 663](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/663.jpeg) ![<pkmn-modern> 664](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/664.jpeg) ![<pkmn-modern> 665](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/665.jpeg) ![<pkmn-modern> 
666](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/666.jpeg) ![<pkmn-modern> 667](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/667.jpeg) ![<pkmn-modern> 668](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/668.jpeg) ![<pkmn-modern> 669](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/669.jpeg) ![<pkmn-modern> 670](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/670.jpeg) ![<pkmn-modern> 671](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/671.jpeg) ![<pkmn-modern> 672](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/672.jpeg) ![<pkmn-modern> 673](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/673.jpeg) ![<pkmn-modern> 674](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/674.jpeg) ![<pkmn-modern> 675](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/675.jpeg) ![<pkmn-modern> 676](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/676.jpeg) ![<pkmn-modern> 677](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/677.jpeg) ![<pkmn-modern> 678](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/678.jpeg) ![<pkmn-modern> 679](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/679.jpeg) ![<pkmn-modern> 680](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/680.jpeg) ![<pkmn-modern> 681](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/681.jpeg) ![<pkmn-modern> 682](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/682.jpeg) ![<pkmn-modern> 683](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/683.jpeg) ![<pkmn-modern> 684](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/684.jpeg) ![<pkmn-modern> 685](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/685.jpeg) ![<pkmn-modern> 686](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/686.jpeg) ![<pkmn-modern> 687](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/687.jpeg) ![<pkmn-modern> 688](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/688.jpeg) ![<pkmn-modern> 689](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/689.jpeg) ![<pkmn-modern> 690](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/690.jpeg) ![<pkmn-modern> 691](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/691.jpeg) ![<pkmn-modern> 692](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/692.jpeg) ![<pkmn-modern> 693](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/693.jpeg) ![<pkmn-modern> 
694](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/694.jpeg) ![<pkmn-modern> 695](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/695.jpeg) ![<pkmn-modern> 696](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/696.jpeg) ![<pkmn-modern> 697](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/697.jpeg) ![<pkmn-modern> 698](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/698.jpeg) ![<pkmn-modern> 699](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/699.jpeg) ![<pkmn-modern> 700](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/700.jpeg) ![<pkmn-modern> 701](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/701.jpeg) ![<pkmn-modern> 702](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/702.jpeg) ![<pkmn-modern> 703](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/703.jpeg) ![<pkmn-modern> 704](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/704.jpeg) ![<pkmn-modern> 705](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/705.jpeg) ![<pkmn-modern> 706](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/706.jpeg) ![<pkmn-modern> 707](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/707.jpeg) ![<pkmn-modern> 708](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/708.jpeg) ![<pkmn-modern> 709](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/709.jpeg) ![<pkmn-modern> 710](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/710.jpeg) ![<pkmn-modern> 711](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/711.jpeg) ![<pkmn-modern> 712](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/712.jpeg) ![<pkmn-modern> 713](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/713.jpeg) ![<pkmn-modern> 714](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/714.jpeg) ![<pkmn-modern> 715](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/715.jpeg) ![<pkmn-modern> 716](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/716.jpeg) ![<pkmn-modern> 717](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/717.jpeg) ![<pkmn-modern> 718](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/718.jpeg) ![<pkmn-modern> 719](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/719.jpeg) ![<pkmn-modern> 720](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/720.jpeg) ![<pkmn-modern> 721](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/721.jpeg) ![<pkmn-modern> 
722](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/722.jpeg) ![<pkmn-modern> 723](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/723.jpeg) ![<pkmn-modern> 724](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/724.jpeg) ![<pkmn-modern> 725](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/725.jpeg) ![<pkmn-modern> 726](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/726.jpeg) ![<pkmn-modern> 727](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/727.jpeg) ![<pkmn-modern> 728](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/728.jpeg) ![<pkmn-modern> 729](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/729.jpeg) ![<pkmn-modern> 730](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/730.jpeg) ![<pkmn-modern> 731](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/731.jpeg) ![<pkmn-modern> 732](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/732.jpeg) ![<pkmn-modern> 733](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/733.jpeg) ![<pkmn-modern> 734](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/734.jpeg) ![<pkmn-modern> 735](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/735.jpeg) ![<pkmn-modern> 736](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/736.jpeg) ![<pkmn-modern> 737](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/737.jpeg) ![<pkmn-modern> 738](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/738.jpeg) ![<pkmn-modern> 739](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/739.jpeg) ![<pkmn-modern> 740](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/740.jpeg) ![<pkmn-modern> 741](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/741.jpeg) ![<pkmn-modern> 742](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/742.jpeg) ![<pkmn-modern> 743](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/743.jpeg) ![<pkmn-modern> 744](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/744.jpeg) ![<pkmn-modern> 745](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/745.jpeg) ![<pkmn-modern> 746](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/746.jpeg) ![<pkmn-modern> 747](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/747.jpeg) ![<pkmn-modern> 748](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/748.jpeg) ![<pkmn-modern> 749](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/749.jpeg) ![<pkmn-modern> 
750](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/750.jpeg) ![<pkmn-modern> 751](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/751.jpeg) ![<pkmn-modern> 752](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/752.jpeg) ![<pkmn-modern> 753](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/753.jpeg) ![<pkmn-modern> 754](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/754.jpeg) ![<pkmn-modern> 755](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/755.jpeg) ![<pkmn-modern> 756](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/756.jpeg) ![<pkmn-modern> 757](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/757.jpeg) ![<pkmn-modern> 758](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/758.jpeg) ![<pkmn-modern> 759](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/759.jpeg) ![<pkmn-modern> 760](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/760.jpeg) ![<pkmn-modern> 761](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/761.jpeg) ![<pkmn-modern> 762](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/762.jpeg) ![<pkmn-modern> 763](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/763.jpeg) ![<pkmn-modern> 764](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/764.jpeg) ![<pkmn-modern> 765](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/765.jpeg) ![<pkmn-modern> 766](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/766.jpeg) ![<pkmn-modern> 767](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/767.jpeg) ![<pkmn-modern> 768](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/768.jpeg) ![<pkmn-modern> 769](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/769.jpeg) ![<pkmn-modern> 770](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/770.jpeg) ![<pkmn-modern> 771](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/771.jpeg) ![<pkmn-modern> 772](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/772.jpeg) ![<pkmn-modern> 773](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/773.jpeg) ![<pkmn-modern> 774](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/774.jpeg) ![<pkmn-modern> 775](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/775.jpeg) ![<pkmn-modern> 776](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/776.jpeg) ![<pkmn-modern> 777](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/777.jpeg) ![<pkmn-modern> 
778](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/778.jpeg) ![<pkmn-modern> 779](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/779.jpeg) ![<pkmn-modern> 780](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/780.jpeg) ![<pkmn-modern> 781](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/781.jpeg) ![<pkmn-modern> 782](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/782.jpeg) ![<pkmn-modern> 783](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/783.jpeg) ![<pkmn-modern> 784](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/784.jpeg) ![<pkmn-modern> 785](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/785.jpeg) ![<pkmn-modern> 786](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/786.jpeg) ![<pkmn-modern> 787](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/787.jpeg) ![<pkmn-modern> 788](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/788.jpeg) ![<pkmn-modern> 789](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/789.jpeg) ![<pkmn-modern> 790](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/790.jpeg) ![<pkmn-modern> 791](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/791.jpeg) ![<pkmn-modern> 792](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/792.jpeg) ![<pkmn-modern> 793](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/793.jpeg) ![<pkmn-modern> 794](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/794.jpeg) ![<pkmn-modern> 795](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/795.jpeg) ![<pkmn-modern> 796](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/796.jpeg) ![<pkmn-modern> 797](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/797.jpeg) ![<pkmn-modern> 798](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/798.jpeg) ![<pkmn-modern> 799](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/799.jpeg) ![<pkmn-modern> 800](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/800.jpeg) ![<pkmn-modern> 801](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/801.jpeg) ![<pkmn-modern> 802](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/802.jpeg) ![<pkmn-modern> 803](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/803.jpeg) ![<pkmn-modern> 804](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/804.jpeg) ![<pkmn-modern> 805](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/805.jpeg) ![<pkmn-modern> 
806](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/806.jpeg) ![<pkmn-modern> 807](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/807.jpeg) ![<pkmn-modern> 808](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/808.jpeg) ![<pkmn-modern> 809](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/809.jpeg) ![<pkmn-modern> 810](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/810.jpeg) ![<pkmn-modern> 811](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/811.jpeg) ![<pkmn-modern> 812](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/812.jpeg) ![<pkmn-modern> 813](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/813.jpeg) ![<pkmn-modern> 814](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/814.jpeg) ![<pkmn-modern> 815](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/815.jpeg) ![<pkmn-modern> 816](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/816.jpeg) ![<pkmn-modern> 817](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/817.jpeg) ![<pkmn-modern> 818](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/818.jpeg) ![<pkmn-modern> 819](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/819.jpeg) ![<pkmn-modern> 820](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/820.jpeg) ![<pkmn-modern> 821](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/821.jpeg) ![<pkmn-modern> 822](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/822.jpeg) ![<pkmn-modern> 823](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/823.jpeg) ![<pkmn-modern> 824](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/824.jpeg) ![<pkmn-modern> 825](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/825.jpeg) ![<pkmn-modern> 826](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/826.jpeg) ![<pkmn-modern> 827](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/827.jpeg) ![<pkmn-modern> 828](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/828.jpeg) ![<pkmn-modern> 829](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/829.jpeg) ![<pkmn-modern> 830](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/830.jpeg) ![<pkmn-modern> 831](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/831.jpeg) ![<pkmn-modern> 832](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/832.jpeg) ![<pkmn-modern> 833](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/833.jpeg) ![<pkmn-modern> 
834](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/834.jpeg) ![<pkmn-modern> 835](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/835.jpeg) ![<pkmn-modern> 836](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/836.jpeg) ![<pkmn-modern> 837](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/837.jpeg) ![<pkmn-modern> 838](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/838.jpeg) ![<pkmn-modern> 839](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/839.jpeg) ![<pkmn-modern> 840](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/840.jpeg) ![<pkmn-modern> 841](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/841.jpeg) ![<pkmn-modern> 842](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/842.jpeg) ![<pkmn-modern> 843](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/843.jpeg) ![<pkmn-modern> 844](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/844.jpeg) ![<pkmn-modern> 845](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/845.jpeg) ![<pkmn-modern> 846](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/846.jpeg) ![<pkmn-modern> 847](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/847.jpeg) ![<pkmn-modern> 848](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/848.jpeg) ![<pkmn-modern> 849](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/849.jpeg) ![<pkmn-modern> 850](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/850.jpeg) ![<pkmn-modern> 851](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/851.jpeg) ![<pkmn-modern> 852](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/852.jpeg) ![<pkmn-modern> 853](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/853.jpeg) ![<pkmn-modern> 854](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/854.jpeg) ![<pkmn-modern> 855](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/855.jpeg) ![<pkmn-modern> 856](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/856.jpeg) ![<pkmn-modern> 857](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/857.jpeg) ![<pkmn-modern> 858](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/858.jpeg) ![<pkmn-modern> 859](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/859.jpeg) ![<pkmn-modern> 860](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/860.jpeg) ![<pkmn-modern> 861](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/861.jpeg) ![<pkmn-modern> 
862](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/862.jpeg) ![<pkmn-modern> 863](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/863.jpeg) ![<pkmn-modern> 864](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/864.jpeg) ![<pkmn-modern> 865](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/865.jpeg) ![<pkmn-modern> 866](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/866.jpeg) ![<pkmn-modern> 867](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/867.jpeg) ![<pkmn-modern> 868](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/868.jpeg) ![<pkmn-modern> 869](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/869.jpeg) ![<pkmn-modern> 870](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/870.jpeg) ![<pkmn-modern> 871](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/871.jpeg) ![<pkmn-modern> 872](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/872.jpeg) ![<pkmn-modern> 873](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/873.jpeg) ![<pkmn-modern> 874](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/874.jpeg) ![<pkmn-modern> 875](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/875.jpeg) ![<pkmn-modern> 876](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/876.jpeg) ![<pkmn-modern> 877](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/877.jpeg) ![<pkmn-modern> 878](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/878.jpeg) ![<pkmn-modern> 879](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/879.jpeg) ![<pkmn-modern> 880](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/880.jpeg) ![<pkmn-modern> 881](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/881.jpeg) ![<pkmn-modern> 882](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/882.jpeg) ![<pkmn-modern> 883](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/883.jpeg) ![<pkmn-modern> 884](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/884.jpeg) ![<pkmn-modern> 885](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/885.jpeg) ![<pkmn-modern> 886](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/886.jpeg) ![<pkmn-modern> 887](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/887.jpeg) ![<pkmn-modern> 888](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/888.jpeg) ![<pkmn-modern> 889](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/889.jpeg) ![<pkmn-modern> 
890](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/890.jpeg) ![<pkmn-modern> 891](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/891.jpeg) ![<pkmn-modern> 892](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/892.jpeg) ![<pkmn-modern> 893](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/893.jpeg) ![<pkmn-modern> 894](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/894.jpeg) ![<pkmn-modern> 895](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/895.jpeg) ![<pkmn-modern> 896](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/896.jpeg) ![<pkmn-modern> 897](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/897.jpeg) ![<pkmn-modern> 898](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/898.jpeg) ![<pkmn-modern> 899](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/899.jpeg) ![<pkmn-modern> 900](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/900.jpeg) ![<pkmn-modern> 901](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/901.jpeg) ![<pkmn-modern> 902](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/902.jpeg) ![<pkmn-modern> 903](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/903.jpeg) ![<pkmn-modern> 904](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/904.jpeg) ![<pkmn-modern> 905](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/905.jpeg) ![<pkmn-modern> 906](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/906.jpeg) ![<pkmn-modern> 907](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/907.jpeg) ![<pkmn-modern> 908](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/908.jpeg) ![<pkmn-modern> 909](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/909.jpeg) ![<pkmn-modern> 910](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/910.jpeg) ![<pkmn-modern> 911](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/911.jpeg) ![<pkmn-modern> 912](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/912.jpeg) ![<pkmn-modern> 913](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/913.jpeg) ![<pkmn-modern> 914](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/914.jpeg) ![<pkmn-modern> 915](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/915.jpeg) ![<pkmn-modern> 916](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/916.jpeg) ![<pkmn-modern> 917](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/917.jpeg) ![<pkmn-modern> 
918](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/918.jpeg) ![<pkmn-modern> 919](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/919.jpeg) ![<pkmn-modern> 920](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/920.jpeg) ![<pkmn-modern> 921](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/921.jpeg) ![<pkmn-modern> 922](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/922.jpeg) ![<pkmn-modern> 923](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/923.jpeg) ![<pkmn-modern> 924](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/924.jpeg) ![<pkmn-modern> 925](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/925.jpeg) ![<pkmn-modern> 926](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/926.jpeg) ![<pkmn-modern> 927](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/927.jpeg) ![<pkmn-modern> 928](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/928.jpeg) ![<pkmn-modern> 929](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/929.jpeg) ![<pkmn-modern> 930](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/930.jpeg) ![<pkmn-modern> 931](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/931.jpeg) ![<pkmn-modern> 932](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/932.jpeg) ![<pkmn-modern> 933](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/933.jpeg) ![<pkmn-modern> 934](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/934.jpeg) ![<pkmn-modern> 935](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/935.jpeg) ![<pkmn-modern> 936](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/936.jpeg) ![<pkmn-modern> 937](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/937.jpeg) ![<pkmn-modern> 938](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/938.jpeg) ![<pkmn-modern> 939](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/939.jpeg) ![<pkmn-modern> 940](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/940.jpeg) ![<pkmn-modern> 941](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/941.jpeg) ![<pkmn-modern> 942](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/942.jpeg) ![<pkmn-modern> 943](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/943.jpeg) ![<pkmn-modern> 944](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/944.jpeg) ![<pkmn-modern> 945](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/945.jpeg) ![<pkmn-modern> 
946](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/946.jpeg) ![<pkmn-modern> 947](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/947.jpeg) ![<pkmn-modern> 948](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/948.jpeg) ![<pkmn-modern> 949](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/949.jpeg) ![<pkmn-modern> 950](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/950.jpeg) ![<pkmn-modern> 951](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/951.jpeg) ![<pkmn-modern> 952](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/952.jpeg) ![<pkmn-modern> 953](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/953.jpeg) ![<pkmn-modern> 954](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/954.jpeg) ![<pkmn-modern> 955](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/955.jpeg) ![<pkmn-modern> 956](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/956.jpeg) ![<pkmn-modern> 957](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/957.jpeg) ![<pkmn-modern> 958](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/958.jpeg) ![<pkmn-modern> 959](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/959.jpeg) ![<pkmn-modern> 960](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/960.jpeg) ![<pkmn-modern> 961](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/961.jpeg) ![<pkmn-modern> 962](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/962.jpeg) ![<pkmn-modern> 963](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/963.jpeg) ![<pkmn-modern> 964](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/964.jpeg) ![<pkmn-modern> 965](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/965.jpeg) ![<pkmn-modern> 966](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/966.jpeg) ![<pkmn-modern> 967](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/967.jpeg) ![<pkmn-modern> 968](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/968.jpeg) ![<pkmn-modern> 969](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/969.jpeg) ![<pkmn-modern> 970](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/970.jpeg) ![<pkmn-modern> 971](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/971.jpeg) ![<pkmn-modern> 972](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/972.jpeg) ![<pkmn-modern> 973](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/973.jpeg) ![<pkmn-modern> 
974](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/974.jpeg) ![<pkmn-modern> 975](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/975.jpeg) ![<pkmn-modern> 976](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/976.jpeg) ![<pkmn-modern> 977](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/977.jpeg) ![<pkmn-modern> 978](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/978.jpeg) ![<pkmn-modern> 979](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/979.jpeg) ![<pkmn-modern> 980](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/980.jpeg) ![<pkmn-modern> 981](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/981.jpeg) ![<pkmn-modern> 982](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/982.jpeg) ![<pkmn-modern> 983](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/983.jpeg) ![<pkmn-modern> 984](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/984.jpeg) ![<pkmn-modern> 985](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/985.jpeg) ![<pkmn-modern> 986](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/986.jpeg) ![<pkmn-modern> 987](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/987.jpeg) ![<pkmn-modern> 988](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/988.jpeg) ![<pkmn-modern> 989](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/989.jpeg) ![<pkmn-modern> 990](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/990.jpeg) ![<pkmn-modern> 991](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/991.jpeg) ![<pkmn-modern> 992](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/992.jpeg) ![<pkmn-modern> 993](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/993.jpeg) ![<pkmn-modern> 994](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/994.jpeg) ![<pkmn-modern> 995](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/995.jpeg) ![<pkmn-modern> 996](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/996.jpeg) ![<pkmn-modern> 997](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/997.jpeg) ![<pkmn-modern> 998](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/998.jpeg) ![<pkmn-modern> 999](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/999.jpeg) ![<pkmn-modern> 1000](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1000.jpeg) ![<pkmn-modern> 1001](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1001.jpeg) ![<pkmn-modern> 
1002](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1002.jpeg) ![<pkmn-modern> 1003](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1003.jpeg) ![<pkmn-modern> 1004](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1004.jpeg) ![<pkmn-modern> 1005](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1005.jpeg) ![<pkmn-modern> 1006](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1006.jpeg) ![<pkmn-modern> 1007](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1007.jpeg) ![<pkmn-modern> 1008](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1008.jpeg) ![<pkmn-modern> 1009](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1009.jpeg) ![<pkmn-modern> 1010](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1010.jpeg) ![<pkmn-modern> 1011](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1011.jpeg) ![<pkmn-modern> 1012](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1012.jpeg) ![<pkmn-modern> 1013](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1013.jpeg) ![<pkmn-modern> 1014](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1014.jpeg) ![<pkmn-modern> 1015](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1015.jpeg) ![<pkmn-modern> 1016](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1016.jpeg) ![<pkmn-modern> 1017](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1017.jpeg) ![<pkmn-modern> 1018](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1018.jpeg) ![<pkmn-modern> 1019](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1019.jpeg) ![<pkmn-modern> 1020](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1020.jpeg) ![<pkmn-modern> 1021](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1021.jpeg) ![<pkmn-modern> 1022](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1022.jpeg) ![<pkmn-modern> 1023](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1023.jpeg) ![<pkmn-modern> 1024](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1024.jpeg) ![<pkmn-modern> 1025](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1025.jpeg) ![<pkmn-modern> 1026](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1026.jpeg) ![<pkmn-modern> 1027](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1027.jpeg) ![<pkmn-modern> 1028](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1028.jpeg) ![<pkmn-modern> 1029](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1029.jpeg) 
![<pkmn-modern> 1030](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1030.jpeg) ![<pkmn-modern> 1031](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1031.jpeg) ![<pkmn-modern> 1032](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1032.jpeg) ![<pkmn-modern> 1033](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1033.jpeg) ![<pkmn-modern> 1034](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1034.jpeg) ![<pkmn-modern> 1035](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1035.jpeg) ![<pkmn-modern> 1036](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1036.jpeg) ![<pkmn-modern> 1037](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1037.jpeg) ![<pkmn-modern> 1038](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1038.jpeg) ![<pkmn-modern> 1039](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1039.jpeg) ![<pkmn-modern> 1040](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1040.jpeg) ![<pkmn-modern> 1041](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1041.jpeg) ![<pkmn-modern> 1042](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1042.jpeg) ![<pkmn-modern> 1043](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1043.jpeg) ![<pkmn-modern> 1044](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1044.jpeg) ![<pkmn-modern> 1045](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1045.jpeg) ![<pkmn-modern> 1046](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1046.jpeg) ![<pkmn-modern> 1047](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1047.jpeg) ![<pkmn-modern> 1048](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1048.jpeg) ![<pkmn-modern> 1049](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1049.jpeg) ![<pkmn-modern> 1050](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1050.jpeg) ![<pkmn-modern> 1051](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1051.jpeg) ![<pkmn-modern> 1052](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1052.jpeg) ![<pkmn-modern> 1053](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1053.jpeg) ![<pkmn-modern> 1054](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1054.jpeg) ![<pkmn-modern> 1055](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1055.jpeg) ![<pkmn-modern> 1056](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1056.jpeg) ![<pkmn-modern> 
1057](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1057.jpeg) ![<pkmn-modern> 1058](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1058.jpeg) ![<pkmn-modern> 1059](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1059.jpeg) ![<pkmn-modern> 1060](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1060.jpeg) ![<pkmn-modern> 1061](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1061.jpeg) ![<pkmn-modern> 1062](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1062.jpeg) ![<pkmn-modern> 1063](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1063.jpeg) ![<pkmn-modern> 1064](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1064.jpeg) ![<pkmn-modern> 1065](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1065.jpeg) ![<pkmn-modern> 1066](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1066.jpeg) ![<pkmn-modern> 1067](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1067.jpeg) ![<pkmn-modern> 1068](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1068.jpeg) ![<pkmn-modern> 1069](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1069.jpeg) ![<pkmn-modern> 1070](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1070.jpeg) ![<pkmn-modern> 1071](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1071.jpeg) ![<pkmn-modern> 1072](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1072.jpeg) ![<pkmn-modern> 1073](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1073.jpeg) ![<pkmn-modern> 1074](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1074.jpeg) ![<pkmn-modern> 1075](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1075.jpeg) ![<pkmn-modern> 1076](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1076.jpeg) ![<pkmn-modern> 1077](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1077.jpeg) ![<pkmn-modern> 1078](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1078.jpeg) ![<pkmn-modern> 1079](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1079.jpeg) ![<pkmn-modern> 1080](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1080.jpeg) ![<pkmn-modern> 1081](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1081.jpeg) ![<pkmn-modern> 1082](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1082.jpeg) ![<pkmn-modern> 1083](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1083.jpeg) ![<pkmn-modern> 1084](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1084.jpeg) 
![<pkmn-modern> 1085](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1085.jpeg) ![<pkmn-modern> 1086](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1086.jpeg) ![<pkmn-modern> 1087](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1087.jpeg) ![<pkmn-modern> 1088](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1088.jpeg) ![<pkmn-modern> 1089](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1089.jpeg) ![<pkmn-modern> 1090](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1090.jpeg) ![<pkmn-modern> 1091](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1091.jpeg) ![<pkmn-modern> 1092](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1092.jpeg) ![<pkmn-modern> 1093](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1093.jpeg) ![<pkmn-modern> 1094](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1094.jpeg) ![<pkmn-modern> 1095](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1095.jpeg) ![<pkmn-modern> 1096](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1096.jpeg) ![<pkmn-modern> 1097](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1097.jpeg) ![<pkmn-modern> 1098](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1098.jpeg) ![<pkmn-modern> 1099](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1099.jpeg) ![<pkmn-modern> 1100](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1100.jpeg) ![<pkmn-modern> 1101](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1101.jpeg) ![<pkmn-modern> 1102](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1102.jpeg) ![<pkmn-modern> 1103](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1103.jpeg) ![<pkmn-modern> 1104](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1104.jpeg) ![<pkmn-modern> 1105](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1105.jpeg) ![<pkmn-modern> 1106](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1106.jpeg) ![<pkmn-modern> 1107](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1107.jpeg) ![<pkmn-modern> 1108](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1108.jpeg) ![<pkmn-modern> 1109](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1109.jpeg) ![<pkmn-modern> 1110](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1110.jpeg) ![<pkmn-modern> 1111](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1111.jpeg) ![<pkmn-modern> 
1112](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1112.jpeg) ![<pkmn-modern> 1113](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1113.jpeg) ![<pkmn-modern> 1114](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1114.jpeg) ![<pkmn-modern> 1115](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1115.jpeg) ![<pkmn-modern> 1116](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1116.jpeg) ![<pkmn-modern> 1117](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1117.jpeg) ![<pkmn-modern> 1118](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1118.jpeg) ![<pkmn-modern> 1119](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1119.jpeg) ![<pkmn-modern> 1120](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1120.jpeg) ![<pkmn-modern> 1121](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1121.jpeg) ![<pkmn-modern> 1122](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1122.jpeg) ![<pkmn-modern> 1123](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1123.jpeg) ![<pkmn-modern> 1124](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1124.jpeg) ![<pkmn-modern> 1125](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1125.jpeg) ![<pkmn-modern> 1126](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1126.jpeg) ![<pkmn-modern> 1127](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1127.jpeg) ![<pkmn-modern> 1128](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1128.jpeg) ![<pkmn-modern> 1129](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1129.jpeg) ![<pkmn-modern> 1130](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1130.jpeg) ![<pkmn-modern> 1131](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1131.jpeg) ![<pkmn-modern> 1132](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1132.jpeg) ![<pkmn-modern> 1133](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1133.jpeg) ![<pkmn-modern> 1134](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1134.jpeg) ![<pkmn-modern> 1135](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1135.jpeg) ![<pkmn-modern> 1136](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1136.jpeg) ![<pkmn-modern> 1137](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1137.jpeg) ![<pkmn-modern> 1138](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1138.jpeg) ![<pkmn-modern> 1139](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1139.jpeg) 
![<pkmn-modern> 1140](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1140.jpeg) ![<pkmn-modern> 1141](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1141.jpeg) ![<pkmn-modern> 1142](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1142.jpeg) ![<pkmn-modern> 1143](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1143.jpeg) ![<pkmn-modern> 1144](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1144.jpeg) ![<pkmn-modern> 1145](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1145.jpeg) ![<pkmn-modern> 1146](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1146.jpeg) ![<pkmn-modern> 1147](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1147.jpeg) ![<pkmn-modern> 1148](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1148.jpeg) ![<pkmn-modern> 1149](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1149.jpeg) ![<pkmn-modern> 1150](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1150.jpeg) ![<pkmn-modern> 1151](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1151.jpeg) ![<pkmn-modern> 1152](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1152.jpeg) ![<pkmn-modern> 1153](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1153.jpeg) ![<pkmn-modern> 1154](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1154.jpeg) ![<pkmn-modern> 1155](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1155.jpeg) ![<pkmn-modern> 1156](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1156.jpeg) ![<pkmn-modern> 1157](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1157.jpeg) ![<pkmn-modern> 1158](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1158.jpeg) ![<pkmn-modern> 1159](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1159.jpeg) ![<pkmn-modern> 1160](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1160.jpeg) ![<pkmn-modern> 1161](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1161.jpeg) ![<pkmn-modern> 1162](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1162.jpeg) ![<pkmn-modern> 1163](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1163.jpeg) ![<pkmn-modern> 1164](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1164.jpeg) ![<pkmn-modern> 1165](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1165.jpeg) ![<pkmn-modern> 1166](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1166.jpeg) ![<pkmn-modern> 
1167](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1167.jpeg) ![<pkmn-modern> 1168](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1168.jpeg) ![<pkmn-modern> 1169](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1169.jpeg) ![<pkmn-modern> 1170](https://huggingface.co/sd-concepts-library/pokemon-modern-artwork/resolve/main/concept_images/1170.jpeg)
Biasface/DDDC2
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 266.12 +/- 19.15 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
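The usage section above is still a TODO, so here is a minimal, hedged sketch of how a checkpoint like this could be loaded and evaluated with `stable-baselines3` and `huggingface_sb3`; the repository id and filename below are placeholders, not values taken from this card.

```python
import gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy

# Hypothetical repo id and filename -- substitute the actual ones for this model.
checkpoint = load_from_hub(repo_id="user/ppo-LunarLander-v2", filename="ppo-LunarLander-v2.zip")
model = PPO.load(checkpoint)

# Roll out the policy for a few episodes to reproduce a mean-reward estimate.
env = gym.make("LunarLander-v2")
mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```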
BigDaddyNe1L/Hhaa
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: apache-2.0 tags: - generated_from_trainer datasets: - clinc_oos metrics: - accuracy model-index: - name: finetuned_ckpt results: - task: name: Text Classification type: text-classification dataset: name: clinc_oos type: clinc_oos config: plus split: train args: plus metrics: - name: Accuracy type: accuracy value: 0.9161290322580645 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # finetuned_ckpt This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the clinc_oos dataset. It achieves the following results on the evaluation set: - Loss: 0.7767 - Accuracy: 0.9161 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 48 - eval_batch_size: 48 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 318 | 3.2814 | 0.7410 | | 3.783 | 2.0 | 636 | 1.8740 | 0.8335 | | 3.783 | 3.0 | 954 | 1.1590 | 0.8916 | | 1.6892 | 4.0 | 1272 | 0.8595 | 0.9103 | | 0.9052 | 5.0 | 1590 | 0.7767 | 0.9161 | ### Framework versions - Transformers 4.21.2 - Pytorch 1.13.0+cu117 - Datasets 2.7.1 - Tokenizers 0.12.1
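For reference, a text-classification checkpoint like this one can usually be queried through the `transformers` pipeline as sketched below; the model id is a placeholder based on the card name and may not match the actual hub path.

```python
from transformers import pipeline

# Hypothetical hub path -- replace with the real repository id of this checkpoint.
classifier = pipeline("text-classification", model="your-username/finetuned_ckpt")

# A CLINC-OOS style utterance; the model should map it to one of the intent labels.
print(classifier("how do i transfer money to my savings account?"))
```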
BigSalmon/BertaMyWorda
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- license: apache-2.0 tags: - generated_from_trainer model-index: - name: t5-base-qasper results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # t5-base-qasper This model is a fine-tuned version of [t5-base](https://huggingface.co/t5-base) on the None dataset. It achieves the following results on the evaluation set: - Loss: 1.1947 - Answer f1: 0.0483 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Answer f1 | |:-------------:|:-----:|:----:|:---------------:|:---------:| | No log | 1.0 | 262 | 1.4772 | 0.0433 | | 1.5405 | 2.0 | 524 | 1.2919 | 0.0492 | | 1.5405 | 3.0 | 786 | 1.2517 | 0.0491 | | 1.1476 | 4.0 | 1048 | 1.2292 | 0.0492 | | 1.1476 | 5.0 | 1310 | 1.2197 | 0.0497 | | 1.056 | 6.0 | 1572 | 1.2150 | 0.0509 | | 1.056 | 7.0 | 1834 | 1.2116 | 0.0507 | | 0.9915 | 8.0 | 2096 | 1.2048 | 0.0503 | | 0.9915 | 9.0 | 2358 | 1.2056 | 0.0512 | | 0.9418 | 10.0 | 2620 | 1.1954 | 0.0497 | | 0.9418 | 11.0 | 2882 | 1.1977 | 0.0491 | | 0.9348 | 12.0 | 3144 | 1.1954 | 0.0486 | | 0.9348 | 13.0 | 3406 | 1.1926 | 0.0482 | | 0.9073 | 14.0 | 3668 | 1.1946 | 0.0486 | | 0.9073 | 15.0 | 3930 | 1.1919 | 0.0480 | | 0.8769 | 16.0 | 4192 | 1.1955 | 0.0485 | | 0.8769 | 17.0 | 4454 | 1.1941 | 0.0481 | | 0.8754 | 18.0 | 4716 | 1.1947 | 0.0483 | ### Framework versions - Transformers 4.24.0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1 - Tokenizers 0.13.2
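Assuming the checkpoint is used like any other T5 sequence-to-sequence model, inference could look roughly like the sketch below; the repository id and the question-plus-context prompt format are assumptions, since the card does not document them.

```python
from transformers import pipeline

# Hypothetical repository id and input format -- the card specifies neither.
qa = pipeline("text2text-generation", model="your-username/t5-base-qasper")

prompt = (
    "question: What dataset was the model evaluated on? "
    "context: The system was evaluated on the QASPER benchmark of questions over NLP papers."
)
print(qa(prompt, max_length=64))
```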
BigSalmon/Flowberta
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
13
null
--- license: apache-2.0 tags: - generated_from_trainer - whisper-event datasets: - mozilla-foundation/common_voice_11_0 metrics: - wer model-index: - name: openai/whisper-medium results: - task: name: Automatic Speech Recognition type: automatic-speech-recognition dataset: name: mozilla-foundation/common_voice_11_0 type: mozilla-foundation/common_voice_11_0 config: cs split: test args: cs metrics: - name: Wer type: wer value: 11.835877792305851 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # openai/whisper-medium This model is a fine-tuned version of [openai/whisper-medium](https://huggingface.co/openai/whisper-medium) on the common_voice_11_0 dataset. It achieves the following results on the evaluation set: - Loss: 0.1805 - Wer: 11.8358 ## Model description The model is fine-tuned for 1000 steps/updates on the CV11 Czech train+validation data. - Zero-shot - 18.80 WER (CV9 test data; on CV11 the WER is close to, or slightly higher than, this) - Fine-tuned - 11.83 WER (CV11 test data) ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 64 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 1000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:-------:| | 0.0076 | 4.06 | 1000 | 0.1805 | 11.8358 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
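A fine-tuned Whisper checkpoint such as this one can typically be run through the speech-recognition pipeline as sketched below; the repository id is a placeholder and `sample.wav` stands in for any 16 kHz Czech recording.

```python
from transformers import pipeline

# Hypothetical repository id for the fine-tuned checkpoint described above.
asr = pipeline(
    "automatic-speech-recognition",
    model="your-username/whisper-medium-cs",
    chunk_length_s=30,  # chunking lets the pipeline handle audio longer than 30 seconds
)

print(asr("sample.wav")["text"])
```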
BigSalmon/FormalBerta
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 280.15 +/- 18.85 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
BigSalmon/FormalBerta3
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
4
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
BigSalmon/FormalRobertaa
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible", "has_space" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
5
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
BigSalmon/FormalRobertaaa
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
12
2022-12-12T15:08:05Z
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
BigSalmon/FroBurta
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
BigSalmon/GPT2HardArticleEasyArticle
[ "pytorch", "jax", "tensorboard", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
7
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
BigSalmon/GPT2HardandEasy
[ "pytorch", "tensorboard", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
9
2022-12-12T15:08:09Z
--- language: en thumbnail: http://www.huggingtweets.com/fhuszar/1670857732486/predictions.png tags: - huggingtweets widget: - text: "My dream is" --- <div class="inline-flex flex-col" style="line-height: 1.5;"> <div class="flex"> <div style="display:inherit; margin-left: 4px; margin-right: 4px; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url(&#39;https://pbs.twimg.com/profile_images/1276429094541025280/9p1fU6X1_400x400.jpg&#39;)"> </div> <div style="display:none; margin-left: 4px; margin-right: 4px; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url(&#39;&#39;)"> </div> <div style="display:none; margin-left: 4px; margin-right: 4px; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url(&#39;&#39;)"> </div> </div> <div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 AI BOT 🤖</div> <div style="text-align: center; font-size: 16px; font-weight: 800">Ferenc Huszár</div> <div style="text-align: center; font-size: 14px;">@fhuszar</div> </div> I was made with [huggingtweets](https://github.com/borisdayma/huggingtweets). Create your own bot based on your favorite user with [the demo](https://colab.research.google.com/github/borisdayma/huggingtweets/blob/master/huggingtweets-demo.ipynb)! ## How does it work? The model uses the following pipeline. ![pipeline](https://github.com/borisdayma/huggingtweets/blob/master/img/pipeline.png?raw=true) To understand how the model was developed, check the [W&B report](https://wandb.ai/wandb/huggingtweets/reports/HuggingTweets-Train-a-Model-to-Generate-Tweets--VmlldzoxMTY5MjI). ## Training data The model was trained on tweets from Ferenc Huszár. | Data | Ferenc Huszár | | --- | --- | | Tweets downloaded | 3241 | | Retweets | 176 | | Short tweets | 327 | | Tweets kept | 2738 | [Explore the data](https://wandb.ai/wandb/huggingtweets/runs/pt0mus9a/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline. ## Training procedure The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on @fhuszar's tweets. Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/wandb/huggingtweets/runs/2c3ck5wm) for full transparency and reproducibility. At the end of training, [the final model](https://wandb.ai/wandb/huggingtweets/runs/2c3ck5wm/artifacts) is logged and versioned. ## How to use You can use this model directly with a pipeline for text generation: ```python from transformers import pipeline generator = pipeline('text-generation', model='huggingtweets/fhuszar') generator("My dream is", num_return_sequences=5) ``` ## Limitations and bias The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias). In addition, the data present in the user's tweets further affects the text generated by the model. ## About *Built by Boris Dayma* [![Follow](https://img.shields.io/twitter/follow/borisdayma?style=social)](https://twitter.com/intent/follow?screen_name=borisdayma) For more details, visit the project repository. [![GitHub stars](https://img.shields.io/github/stars/borisdayma/huggingtweets?style=social)](https://github.com/borisdayma/huggingtweets)
BigSalmon/GPTIntro
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- language: - es license: apache-2.0 tags: - automatic-speech-recognition - es datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_es_xls-r_age_teens-8_sixties-2_s235 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (es)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
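Under the assumption that this checkpoint is used like other HuggingSound fine-tunes, transcription would look roughly as follows; the repository id and audio paths are placeholders.

```python
from huggingsound import SpeechRecognitionModel

# Hypothetical hub path -- replace with the actual repository id of this fine-tune.
model = SpeechRecognitionModel("your-username/exp_w2v2r_es_xls-r_age_teens-8_sixties-2_s235")

# As noted above, input audio should be sampled at 16kHz.
audio_paths = ["example_1.wav", "example_2.wav"]
transcriptions = model.transcribe(audio_paths)
print(transcriptions[0]["transcription"])
```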
BigSalmon/GPTNeo350MInformalToFormalLincoln
[ "pytorch", "gpt_neo", "text-generation", "transformers", "has_space" ]
text-generation
{ "architectures": [ "GPTNeoForCausalLM" ], "model_type": "gpt_neo", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- language: - es license: apache-2.0 tags: - automatic-speech-recognition - es datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_es_xls-r_age_teens-8_sixties-2_s287 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (es)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
BigSalmon/GPTNeo350MInformalToFormalLincoln3
[ "pytorch", "gpt_neo", "text-generation", "transformers", "has_space" ]
text-generation
{ "architectures": [ "GPTNeoForCausalLM" ], "model_type": "gpt_neo", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- language: - es license: apache-2.0 tags: - automatic-speech-recognition - es datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_es_xls-r_age_teens-8_sixties-2_s471 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (es)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
BigSalmon/GPTNeo350MInformalToFormalLincoln6
[ "pytorch", "gpt_neo", "text-generation", "transformers", "has_space" ]
text-generation
{ "architectures": [ "GPTNeoForCausalLM" ], "model_type": "gpt_neo", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
14
null
--- pipeline_tag: image-to-text tags: - image-captioning languages: - en license: bsd-3-clause --- # BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation Model card for image captioning pretrained on COCO dataset - base architecture (with ViT base backbone). | ![BLIP.gif](https://s3.amazonaws.com/moonup/production/uploads/1670928184033-62441d1d9fdefb55a0b7d12c.gif) | |:--:| | <b> Pull figure from BLIP official repo | Image source: https://github.com/salesforce/BLIP </b>| ## TL;DR Authors from the [paper](https://arxiv.org/abs/2201.12086) write in the abstract: *Vision-Language Pre-training (VLP) has advanced the performance for many vision-language tasks. However, most existing pre-trained models only excel in either understanding-based tasks or generation-based tasks. Furthermore, performance improvement has been largely achieved by scaling up the dataset with noisy image-text pairs collected from the web, which is a suboptimal source of supervision. In this paper, we propose BLIP, a new VLP framework which transfers flexibly to both vision-language understanding and generation tasks. BLIP effectively utilizes the noisy web data by bootstrapping the captions, where a captioner generates synthetic captions and a filter removes the noisy ones. We achieve state-of-the-art results on a wide range of vision-language tasks, such as image-text retrieval (+2.7% in average recall@1), image captioning (+2.8% in CIDEr), and VQA (+1.6% in VQA score). BLIP also demonstrates strong generalization ability when directly transferred to videolanguage tasks in a zero-shot manner. Code, models, and datasets are released.* ## Usage You can use this model for conditional and un-conditional image captioning ### Using the Pytorch model #### Running the model on CPU <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForConditionalGeneration processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base") model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') # conditional image captioning text = "a photography of" inputs = processor(raw_image, text, return_tensors="pt") out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) # >>> a photography of a woman and her dog # unconditional image captioning inputs = processor(raw_image, return_tensors="pt") out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> a woman sitting on the beach with her dog ``` </details> #### Running the model on GPU ##### In full precision <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForConditionalGeneration processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base") model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base").to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') # conditional image captioning text = "a photography of" inputs = processor(raw_image, text, return_tensors="pt").to("cuda") out = model.generate(**inputs) 
print(processor.decode(out[0], skip_special_tokens=True)) # >>> a photography of a woman and her dog # unconditional image captioning inputs = processor(raw_image, return_tensors="pt").to("cuda") out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> a woman sitting on the beach with her dog ``` </details> ##### In half precision (`float16`) <details> <summary> Click to expand </summary> ```python import torch import requests from PIL import Image from transformers import BlipProcessor, BlipForConditionalGeneration processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base") model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base", torch_dtype=torch.float16).to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') # conditional image captioning text = "a photography of" inputs = processor(raw_image, text, return_tensors="pt").to("cuda", torch.float16) out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) # >>> a photography of a woman and her dog # unconditional image captioning inputs = processor(raw_image, return_tensors="pt").to("cuda", torch.float16) out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> a woman sitting on the beach with her dog ``` </details> ## BibTex and citation info ``` @misc{https://doi.org/10.48550/arxiv.2201.12086, doi = {10.48550/ARXIV.2201.12086}, url = {https://arxiv.org/abs/2201.12086}, author = {Li, Junnan and Li, Dongxu and Xiong, Caiming and Hoi, Steven}, keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, title = {BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation}, publisher = {arXiv}, year = {2022}, copyright = {Creative Commons Attribution 4.0 International} } ```
BigSalmon/GPTT
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
9
null
--- license: apache-2.0 tags: - generated_from_trainer datasets: - clinc_oos metrics: - accuracy model-index: - name: distilbert-base-uncased-finetuned-clinc results: - task: name: Text Classification type: text-classification dataset: name: clinc_oos type: clinc_oos config: plus split: train args: plus metrics: - name: Accuracy type: accuracy value: 0.9161290322580645 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # distilbert-base-uncased-finetuned-clinc This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the clinc_oos dataset. It achieves the following results on the evaluation set: - Loss: 0.7767 - Accuracy: 0.9161 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 48 - eval_batch_size: 48 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 318 | 3.2814 | 0.7410 | | 3.783 | 2.0 | 636 | 1.8740 | 0.8335 | | 3.783 | 3.0 | 954 | 1.1590 | 0.8916 | | 1.6892 | 4.0 | 1272 | 0.8595 | 0.9103 | | 0.9052 | 5.0 | 1590 | 0.7767 | 0.9161 | ### Framework versions - Transformers 4.21.2 - Pytorch 1.13.0+cu117 - Datasets 2.7.1 - Tokenizers 0.12.1
BigSalmon/InfillFormalLincoln
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- language: - nl license: apache-2.0 tags: - whisper-event - generated_from_trainer datasets: - mozilla-foundation/common_voice_11_0 metrics: - wer model-index: - name: Whisper Large v2 Dutch results: - task: name: Automatic Speech Recognition type: automatic-speech-recognition dataset: name: mozilla-foundation/common_voice_11_0 nl type: mozilla-foundation/common_voice_11_0 config: nl split: test args: nl metrics: - name: Wer type: wer value: 5.895082837397793 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Whisper Large v2 Dutch This model is a fine-tuned version of [openai/whisper-large-v2](https://huggingface.co/openai/whisper-large-v2) on the mozilla-foundation/common_voice_11_0 nl dataset. It achieves the following results on the evaluation set: - Loss: 0.1310 - Wer: 5.8951 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 4 - eval_batch_size: 4 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 12000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:-----:|:---------------:|:-------:| | 0.138 | 0.08 | 1000 | 0.2101 | 11.5288 | | 0.121 | 0.17 | 2000 | 0.1987 | 10.4458 | | 0.1413 | 0.25 | 3000 | 0.1956 | 10.4672 | | 0.1158 | 0.33 | 4000 | 0.1778 | 9.3729 | | 0.1056 | 0.42 | 5000 | 0.1795 | 9.7792 | | 0.056 | 1.05 | 6000 | 0.1560 | 7.6927 | | 0.0323 | 1.14 | 7000 | 0.1460 | 7.1445 | | 0.0213 | 1.22 | 8000 | 0.1491 | 7.2844 | | 0.051 | 1.3 | 9000 | 0.1457 | 6.9587 | | 0.0196 | 1.39 | 10000 | 0.1420 | 6.6086 | | 0.019 | 2.02 | 11000 | 0.1303 | 6.0553 | | 0.0124 | 2.11 | 12000 | 0.1310 | 5.8951 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
BigSalmon/InformalToFormalLincoln19
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
11
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 262.03 +/- 19.00 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
BigSalmon/InformalToFormalLincoln21
[ "pytorch", "gpt2", "text-generation", "transformers", "has_space" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- license: apache-2.0 tags: - whisper-event - generated_from_trainer datasets: - mozilla-foundation/common_voice_11_0 metrics: - wer model-index: - name: openai/whisper-medium results: - task: name: Automatic Speech Recognition type: automatic-speech-recognition dataset: name: mozilla-foundation/common_voice_11_0 type: mozilla-foundation/common_voice_11_0 config: hu split: test args: hu metrics: - name: Wer type: wer value: 19.839905128965313 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # openai/whisper-medium This model is a fine-tuned version of [openai/whisper-medium](https://huggingface.co/openai/whisper-medium) on the common_voice_11_0 dataset. It achieves the following results on the evaluation set: - Loss: 0.3069 - Wer: 19.8399 ## Model description The model is fine-tuned for 1000 steps/updates on the CV11 Hungarian train+validation data. - Zero-shot - 26.9 WER (CV9 test data; on CV11 the WER is close to, or a bit higher than, this) - Fine-tuned - 19.83 WER (CV11 test data) ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 64 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 1000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:-------:| | 0.0031 | 7.46 | 1000 | 0.3069 | 19.8399 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
BigSalmon/InformalToFormalLincoln25
[ "pytorch", "gpt2", "text-generation", "transformers", "has_space" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- license: creativeml-openrail-m tags: - text-to-image - stable-diffusion --- ### The Dreambooth concept model Miwano_Rag-8000 was trained by hr16 with the [Shinja Zero SoTA DreamBooth_Stable_Diffusion](https://colab.research.google.com/drive/1G7qx6M_S1PDDlsWIMdbZXwdZik6sUlEh) notebook <br> Test the concept with the [Shinja Zero no Notebook](https://colab.research.google.com/drive/1Hp1ZIjPbsZKlCtomJVmt2oX7733W44b0) <br> Or test it with `diffusers` via the [Colab Notebook for Inference](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_dreambooth_inference.ipynb) Sample images of the concept: WIP
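Since the card suggests testing the concept with `diffusers`, a minimal loading sketch could look like the following; the repository id, the prompt, and the availability of diffusers-format weights are all assumptions.

```python
import torch
from diffusers import StableDiffusionPipeline

# Hypothetical repository id -- the card does not give the final hub path of the concept.
pipe = StableDiffusionPipeline.from_pretrained(
    "hr16/Miwano_Rag-8000", torch_dtype=torch.float16
).to("cuda")

image = pipe("a portrait of the Miwano_Rag-8000 concept, highly detailed").images[0]
image.save("sample.png")
```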
BigSalmon/InformalToFormalLincolnDistilledGPT2
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
7
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 264.47 +/- 17.66 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
BigSalmon/MrLincoln7
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- language: - pl license: apache-2.0 tags: - whisper-event - generated_from_trainer datasets: - mozilla-foundation/common_voice_11_0 metrics: - wer model-index: - name: Whisper Large v2 PL results: - task: type: automatic-speech-recognition name: Automatic Speech Recognition dataset: name: Common Voice 11.0 type: mozilla-foundation/common_voice_11_0 config: pl split: test args: pl metrics: - type: wer value: 6.89 name: WER - type: wer_without_norm value: 19.79 name: WER unnormalized - type: cer value: 1.88 name: CER - type: mer value: 6.84 name: MER - task: type: automatic-speech-recognition name: Automatic Speech Recognition dataset: name: facebook/voxpopuli type: facebook/voxpopuli config: pl split: test metrics: - type: wer value: 9.26 name: WER - type: wer_without_norm value: 30.25 name: WER unnormalized - type: cer value: 5.32 name: CER - type: mer value: 9.1 name: MER - task: type: automatic-speech-recognition name: Automatic Speech Recognition dataset: name: google/fleurs type: google/fleurs config: pl_pl split: test metrics: - type: wer value: 9.88 name: WER - type: wer_without_norm value: 29.53 name: WER unnormalized - type: cer value: 5.09 name: CER - type: mer value: 9.73 name: MER --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Whisper Large v2 PL This model is a fine-tuned version of [openai/whisper-large-v2](https://huggingface.co/openai/whisper-large-v2) on the Common Voice 11.0 dataset. It achieves the following results on the evaluation set: - Loss: 0.4222 - Wer: 6.9125 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 8 - eval_batch_size: 4 - seed: 42 - gradient_accumulation_steps: 8 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 5000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:------:| | 0.1144 | 1.93 | 500 | 0.2016 | 7.4749 | | 0.0441 | 3.86 | 1000 | 0.2193 | 7.3154 | | 0.0099 | 5.79 | 1500 | 0.2983 | 7.0804 | | 0.0048 | 7.72 | 2000 | 0.3514 | 7.0988 | | 0.0017 | 9.65 | 2500 | 0.3614 | 7.0485 | | 0.0014 | 11.58 | 3000 | 0.3814 | 7.1240 | | 0.001 | 13.51 | 3500 | 0.3773 | 6.9931 | | 0.0005 | 15.44 | 4000 | 0.4085 | 6.9662 | | 0.0004 | 17.37 | 4500 | 0.4195 | 6.9192 | | 0.0004 | 19.3 | 5000 | 0.4222 | 6.9125 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.13.0+cu117 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
BigSalmon/SimplifyText
[ "pytorch", "gpt2", "text-generation", "transformers" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
17
null
--- license: apache-2.0 tags: - generated_from_trainer metrics: - accuracy model-index: - name: distilbert-base-uncased_cls_bbc-news results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # distilbert-base-uncased_cls_bbc-news This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1140 - Accuracy: 0.976 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 77 | 0.2531 | 0.944 | | No log | 2.0 | 154 | 0.0971 | 0.973 | | No log | 3.0 | 231 | 0.0951 | 0.977 | | No log | 4.0 | 308 | 0.1166 | 0.975 | | No log | 5.0 | 385 | 0.1140 | 0.976 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
BigSalmon/prepositions
[ "pytorch", "roberta", "fill-mask", "transformers", "autotrain_compatible", "has_space" ]
fill-mask
{ "architectures": [ "RobertaForMaskedLM" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
7
null
--- license: apache-2.0 tags: - text-classification - generated_from_trainer metrics: - accuracy model-index: - name: categorizacion_comercios_v_0.0.8 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # categorizacion_comercios_v_0.0.8 This model is a fine-tuned version of [bert-base-uncased](https://huggingface.co/bert-base-uncased) on the datasetX dataset. It achieves the following results on the evaluation set: - Loss: 0.5323 - Accuracy: 0.8671 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results ### Framework versions - Transformers 4.23.1 - Pytorch 1.13.0+cpu - Datasets 2.6.1 - Tokenizers 0.13.1
BobBraico/distilbert-base-uncased-finetuned-imdb-accelerate
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) The Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial to learn to train your first agent using ML-Agents and publish it to the Hub: ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser:**. 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Step 1: Write your model_id: Adder/ppo-Huggy 3. Step 2: Select your *.nn /*.onnx file 4. Click on Watch the agent play 👀
Boondong/Wandee
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- language: - fr license: apache-2.0 tags: - automatic-speech-recognition - fr datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_fr_xls-r_age_teens-2_sixties-8_s443 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (fr)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
Branex/gpt-neo-2.7B
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: mit tags: - generated_from_trainer model-index: - name: toptest results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # toptest This model is a fine-tuned version of [EleutherAI/gpt-neo-125M](https://huggingface.co/EleutherAI/gpt-neo-125M) on an unknown dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0005 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 8 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_steps: 1000 - num_epochs: 1 - mixed_precision_training: Native AMP ### Training results ### Framework versions - Transformers 4.25.1 - Pytorch 1.10.0+cu111 - Datasets 2.7.1 - Tokenizers 0.13.2
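Since usage is not documented, a generic text-generation sketch for a GPT-Neo fine-tune like this one might look as follows; the repository id and prompt are placeholders.

```python
from transformers import pipeline

# Hypothetical hub path for the fine-tuned GPT-Neo checkpoint described above.
generator = pipeline("text-generation", model="your-username/toptest")

print(generator("Once upon a time", max_new_tokens=40, do_sample=True)[0]["generated_text"])
```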
Brendan/cse244b-hw2-roberta
[ "pytorch", "roberta", "text-classification", "transformers" ]
text-classification
{ "architectures": [ "RobertaForSequenceClassification" ], "model_type": "roberta", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
28
null
--- license: mit tags: - generated_from_trainer datasets: - xtreme metrics: - f1 model-index: - name: xlm-roberta-base-finetuned-panx-fr results: - task: name: Token Classification type: token-classification dataset: name: xtreme type: xtreme args: PAN-X.fr metrics: - name: F1 type: f1 value: 0.8340325557979527 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # xlm-roberta-base-finetuned-panx-fr This model is a fine-tuned version of [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) on the xtreme dataset. It achieves the following results on the evaluation set: - Loss: 0.2723 - F1: 0.8340 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 24 - eval_batch_size: 24 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | |:-------------:|:-----:|:----:|:---------------:|:------:| | 0.5909 | 1.0 | 191 | 0.3404 | 0.7891 | | 0.2594 | 2.0 | 382 | 0.2919 | 0.8152 | | 0.1752 | 3.0 | 573 | 0.2723 | 0.8340 | ### Framework versions - Transformers 4.11.3 - Pytorch 1.13.0+cu116 - Datasets 1.16.1 - Tokenizers 0.10.3
Brokette/projetCS
[ "pytorch", "wav2vec2", "automatic-speech-recognition", "transformers" ]
automatic-speech-recognition
{ "architectures": [ "Wav2Vec2ForCTC" ], "model_type": "wav2vec2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
4
null
**Ito Junji Diffusion** > Trained by https://twitter.com/cocktailpeanut ItojunjiDiffusion is a Dreambooth-trained model built from a large set of [Junji Ito](https://en.wikipedia.org/wiki/Junji_Ito) manga images. In your prompt, use the activation token: `itojunji style`. There are 5 ckpt files in this repository. The `model (1).ckpt` is the least trained model, whereas `model (6).ckpt` is the most trained model. **I recommend you try the `model (6).ckpt` first**, and if it doesn't give you the desired images, try other ckpt files. ![images/a1.jpg](images/a1.jpg) ![images/a2.jpg](images/a2.jpg) ![images/a3.jpg](images/a3.jpg) ![images/a4.jpg](images/a4.jpg) ![images/a5.jpg](images/a5.jpg) ---
Brona/model1
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: mit tags: - generated_from_trainer model-index: - name: bart-large-cnn_radiology-ai-cardiothoracic-0.9 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # bart-large-cnn_radiology-ai-cardiothoracic-0.9 This model is a fine-tuned version of [facebook/bart-large-cnn](https://huggingface.co/facebook/bart-large-cnn) on the None dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 3 - eval_batch_size: 3 - seed: 42 - gradient_accumulation_steps: 16 - total_train_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - num_epochs: 1 ### Training results ### Framework versions - Transformers 4.20.1 - Pytorch 1.12.1+cu116 - Datasets 2.4.0 - Tokenizers 0.12.1
BrunoNogueira/DialoGPT-kungfupanda
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
null
--- language: - fr license: apache-2.0 tags: - automatic-speech-recognition - fr datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_fr_xls-r_age_teens-8_sixties-2_s507 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (fr)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
Bryan190/Aguy190
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: apache-2.0 tags: - generated_from_trainer - whisper-event metrics: - wer model-index: - name: whisper-small-et results: - task: type: automatic-speech-recognition name: Automatic Speech Recognition dataset: name: mozilla-foundation/common_voice_11_0 type: mozilla-foundation/common_voice_11_0 config: et split: test metrics: - type: wer value: 43.69 name: WER --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # whisper-small-et This model is a fine-tuned version of [openai/whisper-small](https://huggingface.co/openai/whisper-small) on the following datasets: Common Voice 11, VoxPopuli and FLEURS. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data Estonian data from Common Voice 11, VoxPopuli and FLEURS corpora as both training and validation sets. Tested on Common Voice 11 test set. ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 64 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - training_steps: 2000 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | |:-------------:|:-----:|:----:|:---------------:|:-------:| | 1.1285 | 1.03 | 200 | 1.0640 | 53.4934 | | 0.5163 | 2.05 | 400 | 0.6450 | 41.2428 | | 0.2005 | 4.01 | 600 | 0.5600 | 36.6797 | | 0.1188 | 5.03 | 800 | 0.5718 | 35.2847 | | 0.0487 | 6.06 | 1000 | 0.5999 | 34.7500 | | 0.0216 | 8.01 | 1200 | 0.6479 | 38.1906 | | 0.016 | 9.04 | 1400 | 0.6655 | 39.5034 | | 0.0085 | 10.06 | 1600 | 0.7027 | 33.9038 | | 0.0079 | 12.02 | 1800 | 0.7207 | 39.5723 | | 0.009 | 13.04 | 2000 | 0.7261 | 34.5973 | ### Framework versions - Transformers 4.26.0.dev0 - Pytorch 1.12.1+rocm5.1.1 - Datasets 2.7.1.dev0 - Tokenizers 0.13.2
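As a rough usage sketch (not part of the original card), the checkpoint can be loaded through the `transformers` ASR pipeline; the repo id and audio file below are placeholders.

```python
from transformers import pipeline

# Placeholder repo id -- point this at wherever the checkpoint is actually published.
asr = pipeline("automatic-speech-recognition", model="<username>/whisper-small-et")

# Placeholder audio file; Whisper expects 16 kHz input.
result = asr("sample_estonian_16khz.wav")
print(result["text"])
```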
Brykee/BrykeeBot
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- language: - en tags: - text-to-image - stable-diffusion - dreambooth - anime license: "creativeml-openrail-m" --- #### A fine-tuned Stable Diffusion model for generating Padorus. **Token:** `PadoruMeme` (use this in your prompt to utilise the style)<br> **Class Phrase:** `1girl` (also use this in the prompt) [Model Download](https://huggingface.co/joujiboi/Padoru-Diffusion/resolve/main/2022-12-12T19-38-27_Padoru_1_training_images_2500_max_training_steps_PadoruMeme_token_1girl_class_word.ckpt) Examples: ![Example 1](https://i.imgur.com/DT0GKXz.png) ![Example 2](https://i.imgur.com/gtG728f.png) ![Example 3](https://i.imgur.com/X6td3X1.png) ![Example 4](https://i.imgur.com/ZLGRDYf.png)
Brykee/DialoGPT-medium-Morty
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
10
2022-12-12T17:22:44Z
--- license: apache-2.0 tags: - generated_from_trainer metrics: - accuracy model-index: - name: bert-large-uncased_cls_subj results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # bert-large-uncased_cls_subj This model is a fine-tuned version of [bert-large-uncased](https://huggingface.co/bert-large-uncased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1860 - Accuracy: 0.9675 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.2 - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.2427 | 1.0 | 500 | 0.1733 | 0.9585 | | 0.1349 | 2.0 | 1000 | 0.1377 | 0.958 | | 0.0487 | 3.0 | 1500 | 0.1701 | 0.9635 | | 0.0184 | 4.0 | 2000 | 0.1906 | 0.9675 | | 0.0144 | 5.0 | 2500 | 0.1860 | 0.9675 | ### Framework versions - Transformers 4.20.1 - Pytorch 1.11.0 - Datasets 2.1.0 - Tokenizers 0.12.1
Bryson575x/riceboi
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
2022-12-12T17:22:45Z
--- language: - fr license: apache-2.0 tags: - automatic-speech-recognition - fr datasets: - mozilla-foundation/common_voice_7_0 --- # exp_w2v2r_fr_xls-r_age_teens-8_sixties-2_s571 Fine-tuned [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) for speech recognition using the train split of [Common Voice 7.0 (fr)](https://huggingface.co/datasets/mozilla-foundation/common_voice_7_0). When using this model, make sure that your speech input is sampled at 16kHz. This model has been fine-tuned by the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) tool.
Bubb-les/DisloGPT-medium-HarryPotter
[ "pytorch", "gpt2", "text-generation", "transformers", "conversational" ]
conversational
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": 1000 }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
8
null
--- license: creativeml-openrail-m tags: - text-to-image - stable-diffusion - MJv4 --- The "v2" of a custom-tuned SD model based on MJ images. Built on SD 1.5. Use it by including "dvMJv4" or "dvMJv4 style" towards the beginning of your prompt. Sample pictures of this concept: ![MJv4_v2_Thumb.jpg](https://s3.amazonaws.com/moonup/production/uploads/1670867784183-6331c100acb6472115ae666a.jpeg)
BumBelDumBel/TRUMP
[ "pytorch", "tensorboard", "gpt2", "text-generation", "transformers", "generated_from_trainer", "license:mit" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
5
null
--- tags: - Taxi-v3 - q-learning - reinforcement-learning - custom-implementation model-index: - name: q-Taxi-v3 results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: Taxi-v3 type: Taxi-v3 metrics: - type: mean_reward value: 7.56 +/- 2.71 name: mean_reward verified: false --- # **Q-Learning** Agent playing **Taxi-v3** This is a trained model of a **Q-Learning** agent playing **Taxi-v3**. ## Usage ```python model = load_from_hub(repo_id="marik0/q-Taxi-v3", filename="q-learning.pkl") # Don't forget to check if you need to add additional attributes (is_slippery=False etc) env = gym.make(model["env_id"]) ```
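Because `load_from_hub` in the snippet above is a helper defined in the course notebooks rather than an importable package function, here is a self-contained loading sketch; the pickle keys (`env_id`, `qtable`) are assumptions based on the course's q-learning template.

```python
import pickle

import gym
from huggingface_hub import hf_hub_download

# Download the pickled Q-learning bundle from the Hub.
path = hf_hub_download(repo_id="marik0/q-Taxi-v3", filename="q-learning.pkl")
with open(path, "rb") as f:
    model = pickle.load(f)

# "env_id" and "qtable" are assumed keys, following the course template.
env = gym.make(model["env_id"])
qtable = model["qtable"]
```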
BumBelDumBel/ZORK-AI-TEST
[ "pytorch", "tensorboard", "gpt2", "text-generation", "transformers", "generated_from_trainer", "license:mit" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
9
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) The Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial to learn to train your first agent using ML-Agents and publish it to the Hub: ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser:**. 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Step 1: Write your model_id: M331/ppo-Huggy 3. Step 2: Select your *.nn /*.onnx file 4. Click on Watch the agent play 👀
BumBelDumBel/ZORK_AI_FANTASY
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- license: mit tags: - generated_from_trainer datasets: - xtreme metrics: - f1 model-index: - name: xlm-roberta-base-finetuned-panx-it results: - task: name: Token Classification type: token-classification dataset: name: xtreme type: xtreme args: PAN-X.it metrics: - name: F1 type: f1 value: 0.8322368421052632 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # xlm-roberta-base-finetuned-panx-it This model is a fine-tuned version of [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) on the xtreme dataset. It achieves the following results on the evaluation set: - Loss: 0.2369 - F1: 0.8322 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 24 - eval_batch_size: 24 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | |:-------------:|:-----:|:----:|:---------------:|:------:| | 0.8113 | 1.0 | 70 | 0.3088 | 0.7546 | | 0.259 | 2.0 | 140 | 0.2541 | 0.8155 | | 0.1791 | 3.0 | 210 | 0.2369 | 0.8322 | ### Framework versions - Transformers 4.11.3 - Pytorch 1.13.0+cu116 - Datasets 1.16.1 - Tokenizers 0.10.3
BumBelDumBel/ZORK_AI_SCIFI
[ "pytorch", "tensorboard", "gpt2", "text-generation", "transformers", "generated_from_trainer" ]
text-generation
{ "architectures": [ "GPT2LMHeadModel" ], "model_type": "gpt2", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": true, "max_length": 50 }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
14
null
--- language: - en license: apache-2.0 tags: - generated_from_trainer datasets: - cstop_artificial model-index: - name: t5-base-pointer-adv-cstop_artificial results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # t5-base-pointer-adv-cstop_artificial This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on the cstop_artificial dataset. It achieves the following results on the evaluation set: - Loss: 0.0728 - Exact Match: 0.7925 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 64 - total_train_batch_size: 512 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - training_steps: 3000 ### Training results | Training Loss | Epoch | Step | Validation Loss | Exact Match | |:-------------:|:-----:|:----:|:---------------:|:-----------:| | 1.7423 | 12.5 | 200 | 0.1173 | 0.2397 | | 0.3678 | 25.0 | 400 | 0.0728 | 0.3363 | | 0.3202 | 37.5 | 600 | 0.0879 | 0.3381 | | 0.3452 | 50.0 | 800 | 0.0908 | 0.3363 | | 0.3099 | 62.5 | 1000 | 0.1056 | 0.3435 | | 0.3057 | 75.0 | 1200 | 0.1109 | 0.3470 | | 0.3045 | 87.5 | 1400 | 0.1273 | 0.3453 | | 0.3052 | 100.0 | 1600 | 0.1065 | 0.3417 | | 0.3037 | 112.5 | 1800 | 0.1387 | 0.3381 | | 0.3036 | 125.0 | 2000 | 0.1421 | 0.3453 | | 0.3023 | 137.5 | 2200 | 0.1649 | 0.3399 | | 0.3028 | 150.0 | 2400 | 0.1574 | 0.3399 | | 0.3025 | 162.5 | 2600 | 0.1563 | 0.3399 | | 0.3017 | 175.0 | 2800 | 0.1589 | 0.3399 | | 0.302 | 187.5 | 3000 | 0.1587 | 0.3417 | ### Framework versions - Transformers 4.24.0 - Pytorch 1.13.0+cu117 - Datasets 2.7.0 - Tokenizers 0.13.2
Buntan/BuntanAI
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
--- tags: - unity-ml-agents - ml-agents - deep-reinforcement-learning - reinforcement-learning - ML-Agents-Huggy library_name: ml-agents --- # **ppo** Agent playing **Huggy** This is a trained model of a **ppo** agent playing **Huggy** using the [Unity ML-Agents Library](https://github.com/Unity-Technologies/ml-agents). ## Usage (with ML-Agents) The Documentation: https://github.com/huggingface/ml-agents#get-started We wrote a complete tutorial to learn to train your first agent using ML-Agents and publish it to the Hub: ### Resume the training ``` mlagents-learn <your_configuration_file_path.yaml> --run-id=<run_id> --resume ``` ### Watch your Agent play You can watch your agent **playing directly in your browser:**. 1. Go to https://huggingface.co/spaces/unity/ML-Agents-Huggy 2. Step 1: Write your model_id: magnomont12/huggy 3. Step 2: Select your *.nn /*.onnx file 4. Click on Watch the agent play 👀
Buntan/xlm-roberta-base-finetuned-marc-en
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
Bwehfuk/Ron
[]
null
{ "architectures": null, "model_type": null, "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
0
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CALM/backup
[ "lean_albert", "transformers" ]
null
{ "architectures": [ "LeanAlbertForPretraining", "LeanAlbertForTokenClassification", "LeanAlbertForSequenceClassification" ], "model_type": "lean_albert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
4
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-ca-ner
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
85
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-ca-poetry
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:1905.05700", "arxiv:2103.06678", "transformers", "license:apache-2.0" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
42
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-ca-pos-glf
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
18
null
--- language: - en license: creativeml-openrail-m tags: - stable-diffusion - stable-diffusion-diffusers - text-to-image - diffusers inference: true widget: - text: "masterpiece, best quality, 1girl, brown hair, green eyes, colorful, autumn, cumulonimbus clouds, lighting, blue sky, falling leaves, garden" example_title: "example 1girl" - text: "masterpiece, best quality, 1boy, brown hair, green eyes, colorful, autumn, cumulonimbus clouds, lighting, blue sky, falling leaves, garden" example_title: "example 1boy" --- # ACertainModel **Try full functions with Google Colab free T4** [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1ldhBc70wvuvkp4Af_vNTzTfBXwpf_cH5?usp=sharing) Check Twitter [#ACertainModel](https://twitter.com/hashtag/ACertainModel) for community artworks Welcome to ACertainModel - a latent diffusion model for weebs. This model is intended to produce high-quality, highly detailed anime style pictures with just a few prompts. Like other anime-style Stable Diffusion models, it also supports danbooru tags, including artists, to generate images. Since I noticed that the laion-aesthetics introduced in the Stable-Diffusion-v-1-4 checkpoint hindered finetuning anime style illustration generation model, Dreambooth was used to finetune some tags separately to make it closer to what it was in SD1.2. To avoid overfitting and possible language drift, I added a huge amount of auto-generated pictures from a single word prompt to the training set, using models that are popular in the community such as Anything-3.0, together with partially manual selected full-danbooru images within a year, for further native training. I am also aware of a method of [LoRA](https://arxiv.org/abs/2106.09685), with a similar idea, finetuning attention layer solely, to have better performance on eyes, hands, and other details. For copyright compliance and technical experiment, it was trained from few artist images directly. It was trained on Dreambooth with pictures generated from several popular diffusion models in the community. The checkpoint was initialized with the weights of a Stable Diffusion Model and subsequently fine-tuned for 2K GPU hours on V100 32GB and 600 GPU hours on A100 40GB at 512P dynamic aspect ratio resolution with a certain ratio of unsupervised auto-generated images from several popular diffusion models in the community with some Textual Inversions and Hypernetworks. We do know some tricks on xformers and 8-bit optimization, but we didn't use any of them for better quality and stability. Up to 15 branches are trained simultaneously, cherry-picking about every 20,000 steps. e.g. **_masterpiece, best quality, 1girl, brown hair, green eyes, colorful, autumn, cumulonimbus clouds, lighting, blue sky, falling leaves, garden_** ## About online preview with Hosted inference API, also generation with this model Parameters are not allowed to be modified, as it seems that it is generated with *Clip skip: 1*, for better performance, it is strongly recommended to use *Clip skip: 2* instead. Here is an example of inference settings, if it is applicable with you on your own server: *Steps: 28, Sampler: Euler a, CFG scale: 11, Clip skip: 2*. ## 🧨 Diffusers This model can be used just like any other Stable Diffusion model. For more information, please have a look at the [Stable Diffusion](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion). 
You can also export the model to [ONNX](https://huggingface.co/docs/diffusers/optimization/onnx), [MPS](https://huggingface.co/docs/diffusers/optimization/mps) and/or FLAX/JAX. ```python from diffusers import StableDiffusionPipeline import torch model_id = "JosephusCheung/ACertainModel" branch_name= "main" pipe = StableDiffusionPipeline.from_pretrained(model_id, revision=branch_name, torch_dtype=torch.float16) pipe = pipe.to("cuda") prompt = "pikachu" image = pipe(prompt).images[0] image.save("./pikachu.png") ``` ## Examples Below are some examples of images generated using this model, with better performance on framing and hand gestures, as well as moving objects, comparing to other analogues: **Anime Girl:** ![Anime Girl](https://huggingface.co/JosephusCheung/ACertainModel/resolve/main/samples/sample-1girl.png) ``` 1girl, brown hair, green eyes, colorful, autumn, cumulonimbus clouds, lighting, blue sky, falling leaves, garden Steps: 28, Sampler: Euler a, CFG scale: 11, Seed: 114514, Clip skip: 2 ``` **Anime Boy:** ![Anime Boy](https://huggingface.co/JosephusCheung/ACertainModel/resolve/main/samples/sample-1boy.png) ``` 1boy, brown hair, green eyes, colorful, autumn, cumulonimbus clouds, lighting, blue sky, falling leaves, garden Steps: 28, Sampler: Euler a, CFG scale: 11, Seed: 114514, Clip skip: 2 ``` ## License This model is open access and available to all, with a CreativeML OpenRAIL-M license further specifying rights and usage. The CreativeML OpenRAIL License specifies: 1. You can't use the model to deliberately produce nor share illegal or harmful outputs or content 2. The authors claims no rights on the outputs you generate, you are free to use them and are accountable for their use which must not go against the provisions set in the license 3. You may re-distribute the weights and use the model commercially and/or as a service. If you do, please be aware you have to include the same use restrictions as the ones in the license and share a copy of the CreativeML OpenRAIL-M to all your users (please read the license entirely and carefully) [Please read the full license here](https://huggingface.co/spaces/CompVis/stable-diffusion-license) ## Is it a NovelAI based model? What is the relationship with SD1.2 and SD1.4? See [ASimilarityCalculatior](https://huggingface.co/JosephusCheung/ASimilarityCalculatior)
CAMeL-Lab/bert-base-arabic-camelbert-ca-pos-msa
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
71
null
--- license: mit tags: - generated_from_trainer datasets: - xtreme metrics: - f1 model-index: - name: xlm-roberta-base-finetuned-panx-en results: - task: name: Token Classification type: token-classification dataset: name: xtreme type: xtreme args: PAN-X.en metrics: - name: F1 type: f1 value: 0.6991051454138703 --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # xlm-roberta-base-finetuned-panx-en This model is a fine-tuned version of [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) on the xtreme dataset. It achieves the following results on the evaluation set: - Loss: 0.3926 - F1: 0.6991 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 24 - eval_batch_size: 24 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | |:-------------:|:-----:|:----:|:---------------:|:------:| | 1.1415 | 1.0 | 50 | 0.5404 | 0.5163 | | 0.5045 | 2.0 | 100 | 0.4347 | 0.6498 | | 0.371 | 3.0 | 150 | 0.3926 | 0.6991 | ### Framework versions - Transformers 4.11.3 - Pytorch 1.13.0+cu116 - Datasets 1.16.1 - Tokenizers 0.10.3
CAMeL-Lab/bert-base-arabic-camelbert-ca
[ "pytorch", "tf", "jax", "bert", "fill-mask", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "BertForMaskedLM" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
580
null
--- license: mit tags: - pytorch - diffusers - unconditional-image-generation - diffusion-models-class --- # Model Card for Unit 1 of the [Diffusion Models Class 🧨](https://github.com/huggingface/diffusion-models-class) This model is a diffusion model for unconditional image generation of cute 🦋. ## Usage ```python from diffusers import DDPMPipeline pipeline = DDPMPipeline.from_pretrained('neatbullshit/sd-class-butterflies-32_neatbullshit') image = pipeline().images[0] image ```
CAMeL-Lab/bert-base-arabic-camelbert-da-ner
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
42
null
--- language: - en license: apache-2.0 tags: - generated_from_trainer datasets: - cstop_artificial model-index: - name: t5-base-adv-cstop_artificial results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # t5-base-adv-cstop_artificial This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on the cstop_artificial dataset. It achieves the following results on the evaluation set: - Loss: 0.0997 - Exact Match: 0.8479 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 64 - total_train_batch_size: 512 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - training_steps: 3000 ### Training results | Training Loss | Epoch | Step | Validation Loss | Exact Match | |:-------------:|:-----:|:----:|:---------------:|:-----------:| | 1.8954 | 12.5 | 200 | 0.1003 | 0.4902 | | 0.3392 | 25.0 | 400 | 0.0997 | 0.5671 | | 0.3092 | 37.5 | 600 | 0.1067 | 0.5653 | | 0.3062 | 50.0 | 800 | 0.1245 | 0.5689 | | 0.5401 | 62.5 | 1000 | 0.1096 | 0.5581 | | 0.3075 | 75.0 | 1200 | 0.1197 | 0.5581 | | 0.3039 | 87.5 | 1400 | 0.1339 | 0.5689 | | 0.3041 | 100.0 | 1600 | 0.1485 | 0.5635 | | 0.3036 | 112.5 | 1800 | 0.1498 | 0.5581 | | 0.304 | 125.0 | 2000 | 0.1454 | 0.5617 | | 0.3022 | 137.5 | 2200 | 0.1516 | 0.5689 | | 0.3032 | 150.0 | 2400 | 0.1361 | 0.5635 | | 0.3035 | 162.5 | 2600 | 0.1427 | 0.5635 | | 0.3001 | 175.0 | 2800 | 0.1466 | 0.5635 | | 0.3048 | 187.5 | 3000 | 0.1471 | 0.5635 | ### Framework versions - Transformers 4.24.0 - Pytorch 1.13.0+cu117 - Datasets 2.7.0 - Tokenizers 0.13.2
CAMeL-Lab/bert-base-arabic-camelbert-da-pos-egy
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
32
null
--- library_name: stable-baselines3 tags: - LunarLander-v2 - deep-reinforcement-learning - reinforcement-learning - stable-baselines3 model-index: - name: PPO results: - task: type: reinforcement-learning name: reinforcement-learning dataset: name: LunarLander-v2 type: LunarLander-v2 metrics: - type: mean_reward value: 262.75 +/- 17.54 name: mean_reward verified: false --- # **PPO** Agent playing **LunarLander-v2** This is a trained model of a **PPO** agent playing **LunarLander-v2** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3). ## Usage (with Stable-baselines3) TODO: Add your code ```python from stable_baselines3 import ... from huggingface_sb3 import load_from_hub ... ```
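A minimal loading-and-evaluation sketch for the TODO above; the repo id and filename are placeholders, since the card does not state them.

```python
import gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy

# Placeholder repo id and filename -- replace with this model's actual values.
checkpoint = load_from_hub(repo_id="<username>/ppo-LunarLander-v2", filename="ppo-LunarLander-v2.zip")
model = PPO.load(checkpoint)

env = gym.make("LunarLander-v2")
mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```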
CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "has_space" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
19,850
null
--- pipeline_tag: 'visual-question-answering' tags: - visual-question-answering inference: false languages: - en license: bsd-3-clause --- # BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation Model card for BLIP trained on visual question answering- base architecture (with ViT base backbone). | ![BLIP.gif](https://s3.amazonaws.com/moonup/production/uploads/1670928184033-62441d1d9fdefb55a0b7d12c.gif) | |:--:| | <b> Pull figure from BLIP official repo | Image source: https://github.com/salesforce/BLIP </b>| ## TL;DR Authors from the [paper](https://arxiv.org/abs/2201.12086) write in the abstract: *Vision-Language Pre-training (VLP) has advanced the performance for many vision-language tasks. However, most existing pre-trained models only excel in either understanding-based tasks or generation-based tasks. Furthermore, performance improvement has been largely achieved by scaling up the dataset with noisy image-text pairs collected from the web, which is a suboptimal source of supervision. In this paper, we propose BLIP, a new VLP framework which transfers flexibly to both vision-language understanding and generation tasks. BLIP effectively utilizes the noisy web data by bootstrapping the captions, where a captioner generates synthetic captions and a filter removes the noisy ones. We achieve state-of-the-art results on a wide range of vision-language tasks, such as image-text retrieval (+2.7% in average recall@1), image captioning (+2.8% in CIDEr), and VQA (+1.6% in VQA score). BLIP also demonstrates strong generalization ability when directly transferred to videolanguage tasks in a zero-shot manner. Code, models, and datasets are released.* ## Usage You can use this model for conditional and un-conditional image captioning ### Using the Pytorch model #### Running the model on CPU <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForQuestionAnswering processor = BlipProcessor.from_pretrained("Salesforce/blip-vqa-base") model = BlipForQuestionAnswering.from_pretrained("Salesforce/blip-vqa-base") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "how many dogs are in the picture?" inputs = processor(raw_image, question, return_tensors="pt") out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> 1 ``` </details> #### Running the model on GPU ##### In full precision <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForQuestionAnswering processor = BlipProcessor.from_pretrained("Salesforce/blip-vqa-base") model = BlipForQuestionAnswering.from_pretrained("Salesforce/blip-vqa-base").to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "how many dogs are in the picture?" 
inputs = processor(raw_image, question, return_tensors="pt").to("cuda") out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> 1 ``` </details> ##### In half precision (`float16`) <details> <summary> Click to expand </summary> ```python import torch import requests from PIL import Image from transformers import BlipProcessor, BlipForQuestionAnswering processor = BlipProcessor.from_pretrained("ybelkada/blip-vqa-base") model = BlipForQuestionAnswering.from_pretrained("ybelkada/blip-vqa-base", torch_dtype=torch.float16).to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "how many dogs are in the picture?" inputs = processor(raw_image, question, return_tensors="pt").to("cuda", torch.float16) out = model.generate(**inputs) print(processor.decode(out[0], skip_special_tokens=True)) >>> 1 ``` </details> ## BibTex and citation info ``` @misc{https://doi.org/10.48550/arxiv.2201.12086, doi = {10.48550/ARXIV.2201.12086}, url = {https://arxiv.org/abs/2201.12086}, author = {Li, Junnan and Li, Dongxu and Xiong, Caiming and Hoi, Steven}, keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, title = {BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation}, publisher = {arXiv}, year = {2022}, copyright = {Creative Commons Attribution 4.0 International} } ```
CAMeL-Lab/bert-base-arabic-camelbert-da
[ "pytorch", "tf", "jax", "bert", "fill-mask", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible" ]
fill-mask
{ "architectures": [ "BertForMaskedLM" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
449
2022-12-12T17:53:18Z
--- pipeline_tags: 'other' tags: - image-text-matching languages: - en license: bsd-3-clause --- # BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation Model card for BLIP trained on image-text matching - base architecture (with ViT base backbone) trained on COCO dataset. | ![BLIP.gif](https://s3.amazonaws.com/moonup/production/uploads/1670928184033-62441d1d9fdefb55a0b7d12c.gif) | |:--:| | <b> Pull figure from BLIP official repo | Image source: https://github.com/salesforce/BLIP </b>| ## TL;DR Authors from the [paper](https://arxiv.org/abs/2201.12086) write in the abstract: *Vision-Language Pre-training (VLP) has advanced the performance for many vision-language tasks. However, most existing pre-trained models only excel in either understanding-based tasks or generation-based tasks. Furthermore, performance improvement has been largely achieved by scaling up the dataset with noisy image-text pairs collected from the web, which is a suboptimal source of supervision. In this paper, we propose BLIP, a new VLP framework which transfers flexibly to both vision-language understanding and generation tasks. BLIP effectively utilizes the noisy web data by bootstrapping the captions, where a captioner generates synthetic captions and a filter removes the noisy ones. We achieve state-of-the-art results on a wide range of vision-language tasks, such as image-text retrieval (+2.7% in average recall@1), image captioning (+2.8% in CIDEr), and VQA (+1.6% in VQA score). BLIP also demonstrates strong generalization ability when directly transferred to videolanguage tasks in a zero-shot manner. Code, models, and datasets are released.* ## Usage You can use this model for conditional and un-conditional image captioning ### Using the Pytorch model #### Running the model on CPU <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForImageTextRetrieval processor = BlipProcessor.from_pretrained("Salesforce/blip-itm-base-coco") model = BlipForImageTextRetrieval.from_pretrained("Salesforce/blip-itm-base-coco") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "A woman and a dog sitting together in a beach." inputs = processor(raw_image, question, return_tensors="pt") itm_scores = model(**inputs)[0] cosine_score = model(**inputs, use_itm_head=False)[0] ``` </details> #### Running the model on GPU ##### In full precision <details> <summary> Click to expand </summary> ```python import requests from PIL import Image from transformers import BlipProcessor, BlipForImageTextRetrieval processor = BlipProcessor.from_pretrained("Salesforce/blip-itm-base-coco") model = BlipForImageTextRetrieval.from_pretrained("Salesforce/blip-itm-base-coco").to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "A woman and a dog sitting together in a beach." 
inputs = processor(raw_image, question, return_tensors="pt").to("cuda") itm_scores = model(**inputs)[0] cosine_score = model(**inputs, use_itm_head=False)[0] ``` </details> ##### In half precision (`float16`) <details> <summary> Click to expand </summary> ```python import torch import requests from PIL import Image from transformers import BlipProcessor, BlipForImageTextRetrieval processor = BlipProcessor.from_pretrained("Salesforce/blip-itm-base-coco") model = BlipForImageTextRetrieval.from_pretrained("Salesforce/blip-itm-base-coco", torch_dtype=torch.float16).to("cuda") img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') question = "A woman and a dog sitting together in a beach." inputs = processor(raw_image, question, return_tensors="pt").to("cuda", torch.float16) itm_scores = model(**inputs)[0] cosine_score = model(**inputs, use_itm_head=False)[0] ``` </details> ## BibTex and citation info ``` @misc{https://doi.org/10.48550/arxiv.2201.12086, doi = {10.48550/ARXIV.2201.12086}, url = {https://arxiv.org/abs/2201.12086}, author = {Li, Junnan and Li, Dongxu and Xiong, Caiming and Hoi, Steven}, keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, title = {BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation}, publisher = {arXiv}, year = {2022}, copyright = {Creative Commons Attribution 4.0 International} } ```
CAMeL-Lab/bert-base-arabic-camelbert-mix-did-madar-corpus26
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
45
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-mix-did-madar-corpus6
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
34
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-mix-did-nadi
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
63
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-mix-ner
[ "pytorch", "tf", "bert", "token-classification", "ar", "arxiv:2103.06678", "transformers", "license:apache-2.0", "autotrain_compatible", "has_space" ]
token-classification
{ "architectures": [ "BertForTokenClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
1860
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.
CAMeL-Lab/bert-base-arabic-camelbert-mix-poetry
[ "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:1905.05700", "arxiv:2103.06678", "transformers", "license:apache-2.0" ]
text-classification
{ "architectures": [ "BertForSequenceClassification" ], "model_type": "bert", "task_specific_params": { "conversational": { "max_length": null }, "summarization": { "early_stopping": null, "length_penalty": null, "max_length": null, "min_length": null, "no_repeat_ngram_size": null, "num_beams": null, "prefix": null }, "text-generation": { "do_sample": null, "max_length": null }, "translation_en_to_de": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_fr": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null }, "translation_en_to_ro": { "early_stopping": null, "max_length": null, "num_beams": null, "prefix": null } } }
31
null
Please refer to [flaim](https://github.com/bobmcdear/flaim) for sample usage and more information.