T145 committed
Commit 010ff90
Parent: 01a1967

Updated linting

Files changed (4)
  1. .vscode/settings.json +6 -0
  2. app.py +6 -5
  3. pixi.lock +2 -9
  4. pyproject.toml +28 -2
.vscode/settings.json ADDED
@@ -0,0 +1,6 @@
+ {
+     "editor.codeActionsOnSave": {
+         "source.organizeImports": "explicit",
+         "source.fixAll": "explicit"
+     }
+ }
app.py CHANGED
@@ -1,9 +1,10 @@
- import gradio as gr
- from llama_cpp import Llama
  import threading
- from huggingface_hub import HfApi
  import time

+ import gradio as gr
+ from huggingface_hub import HfApi
+ from llama_cpp import Llama
+
  API = HfApi()
  LLM = Llama.from_pretrained(
      repo_id="mradermacher/ZEUS-8B-V2-i1-GGUF",
@@ -72,8 +73,8 @@ if __name__ == "__main__":
  type="messages",
  additional_inputs=[
      gr.Textbox(value="You are a friendly assistant.", label="System message"),
-     gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-     gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+     gr.Slider(minimum=100, maximum=2048, value=1024, step=2, label="Max new tokens"),
+     gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.05, label="Temperature"),
  gr.Slider(
      minimum=0.1,
      maximum=1.0,
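Note on the slider retuning: with gr.ChatInterface(type="messages", ...), the additional_inputs above are simply forwarded to the chat callback as extra positional arguments in declaration order; the callback itself is outside this hunk. A minimal sketch of how those values would plausibly be consumed, assuming a respond-style function and llama-cpp-python's create_chat_completion API (the function name, third slider meaning, and body are illustrative, not taken from the repo):

    # Illustrative only: system message, max tokens, temperature and the
    # remaining 0.1-1.0 slider (presumably top_p) arrive after (message, history).
    def respond(message, history, system_message, max_tokens, temperature, top_p):
        messages = [{"role": "system", "content": system_message}]
        messages.extend(history)  # type="messages" history is a list of {"role", "content"} dicts
        messages.append({"role": "user", "content": message})
        result = LLM.create_chat_completion(
            messages=messages,
            max_tokens=max_tokens,      # bounded by the "Max new tokens" slider (now 100-2048)
            temperature=temperature,    # "Temperature" slider (now capped at 2.0)
            top_p=top_p,
        )
        return result["choices"][0]["message"]["content"]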
pixi.lock CHANGED
@@ -168,7 +168,6 @@ environments:
  - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda
  - pypi: https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl
  - pypi: https://files.pythonhosted.org/packages/91/00/9b4c5557b694fb8d3730d006afbb2facc36f27e2c0f136d9cc009f4b1ffa/llama_cpp_python-0.3.5.tar.gz
- - pypi: .
  packages:
  - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda
    build_number: 8
@@ -360,14 +359,6 @@ packages:
  - pkg:pypi/charset-normalizer?source=hash-mapping
  size: 47533
  timestamp: 1733218182393
- - pypi: .
-   name: chatdemo
-   version: 1.0.0
-   sha256: 72c2f711e707531568782385f530f11ea1c33106fa7cd9d510dd2e40bcf9b920
-   requires_dist:
-   - llama-cpp-python
-   requires_python: <3.11
-   editable: true
  - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh7428d3b_0.conda
    sha256: c889ed359ae47eead4ffe8927b7206b22c55e67d6e74a9044c23736919d61e8d
    md5: 90e5571556f7a45db92ee51cb8f97af6
@@ -1919,6 +1910,8 @@ packages:
  - ucrt >=10.0.20348.0
  - vc >=14.2,<15
  - vc14_runtime >=14.29.30139
+ arch: x86_64
+ platform: win
  license: MIT
  license_family: MIT
  purls:
pyproject.toml CHANGED
@@ -14,11 +14,37 @@ requires = ["hatchling"]
  channels = ["conda-forge", "huggingface"]
  platforms = ["win-64"]

- [tool.pixi.pypi-dependencies]
- chatdemo = { path = ".", editable = true }
+ # [tool.pixi.pypi-dependencies]
+ # chatdemo = { path = ".", editable = true }

  [tool.pixi.tasks]

  [tool.pixi.dependencies]
  gradio = "==5.0.1"
  huggingface_hub = "==0.25.2"
+ ruff = ">=0.8.4,<0.9"
+
+ [tool.ruff]
+ exclude = [".pixi", "__pycache__"]
+ ignore = ["E501", "E402"]
+ select = [
+     # pycodestyle
+     "E",
+     # Pyflakes
+     "F",
+     # Warnings
+     "W",
+     # isort
+     "I",
+     # pyupgrade
+     "UP",
+     # flake8-bugbear
+     "B",
+     # flake8-simplify
+     "SIM",
+     # flake8-quotes
+     "Q",
+     # flake8-async
+     "ASYNC"
+ ]
+ line-length = 132