aimevzulari commited on
Commit
9581e53
·
verified ·
1 Parent(s): 6d6866b

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -1170
app.py DELETED
@@ -1,1170 +0,0 @@
1
- """
2
- Cursor Rules Generator - Hugging Face Spaces App
3
-
4
- This module implements the Gradio interface for Hugging Face Spaces deployment.
5
- All code is self-contained in this file to avoid import issues.
6
- """
7
-
8
- import os
9
- import gradio as gr
10
- import json
11
- import requests
12
- from dotenv import load_dotenv
13
- from abc import ABC, abstractmethod
14
- from typing import Dict, List, Optional, Any
15
-
16
- # Load environment variables
17
- load_dotenv()
18
-
19
- # Configuration settings
20
class Settings:
    """Application settings.

    Values are read once, at import time, from the process environment
    (populated by ``load_dotenv()`` above); changes to the environment
    therefore take effect only on restart.
    """

    # Application identity and debug switch.
    APP_NAME = "Cursor Rules Generator"
    DEBUG = os.getenv("DEBUG", "False").lower() == "true"

    # Provider credentials (empty string when unset).
    GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "")
    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
    OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")

    # Defaults used when the UI supplies no explicit choice.
    DEFAULT_PROVIDER = os.getenv("DEFAULT_PROVIDER", "gemini")
    DEFAULT_RULE_TYPE = os.getenv("DEFAULT_RULE_TYPE", "Always")

    # Provider endpoint roots.
    GEMINI_API_URL = "https://generativelanguage.googleapis.com/v1beta"
    OPENAI_API_URL = "https://api.openai.com/v1"
    OPENROUTER_API_URL = "https://openrouter.ai/api/v1"

    # Per-provider default models.
    DEFAULT_GEMINI_MODEL = os.getenv("DEFAULT_GEMINI_MODEL", "gemini-2.0-flash")
    DEFAULT_OPENAI_MODEL = os.getenv("DEFAULT_OPENAI_MODEL", "gpt-4o")
    DEFAULT_OPENROUTER_MODEL = os.getenv("DEFAULT_OPENROUTER_MODEL", "openai/gpt-4o")

    # Rule-generation limits.
    MAX_RULE_LENGTH = int(os.getenv("MAX_RULE_LENGTH", "10000"))
    DEFAULT_TEMPERATURE = float(os.getenv("DEFAULT_TEMPERATURE", "0.7"))
49
-
50
- # LLM Adapter Interface
51
class LLMAdapter(ABC):
    """Abstract contract that every LLM provider adapter must fulfil.

    Concrete adapters (Gemini, OpenAI, OpenRouter) implement the same
    four operations so the rest of the app can stay provider-agnostic.
    """

    @abstractmethod
    def initialize(self, api_key: str, **kwargs) -> None:
        """Configure the adapter with credentials and provider options."""

    @abstractmethod
    def validate_api_key(self, api_key: str) -> bool:
        """Return True when *api_key* is accepted by the provider."""

    @abstractmethod
    def get_available_models(self) -> List[Dict[str, str]]:
        """Return the provider's model catalogue as id/name mappings."""

    @abstractmethod
    def generate_rule(
        self,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Produce a Cursor Rule in MDC format via the provider."""
80
-
81
- # Gemini Adapter
82
class GeminiAdapter(LLMAdapter):
    """Adapter for Google's Gemini (Generative Language) REST API."""

    # Seconds before an HTTP request is abandoned. The original calls had
    # no timeout, which could hang the UI thread indefinitely.
    _REQUEST_TIMEOUT = 30

    # Catalogue served when the live model list cannot be fetched.
    _FALLBACK_MODELS = [
        {'id': 'gemini-2.5-pro', 'name': 'Gemini 2.5 Pro'},
        {'id': 'gemini-2.0-flash', 'name': 'Gemini 2.0 Flash'},
        {'id': 'gemini-2.0-flash-lite', 'name': 'Gemini 2.0 Flash-Lite'}
    ]

    def __init__(self):
        """Create an unconfigured adapter; call initialize() before use."""
        self.api_key = None
        self.api_url = Settings.GEMINI_API_URL
        self.initialized = False

    def initialize(self, api_key: str, **kwargs) -> None:
        """Store the API key and an optional 'api_url' override."""
        self.api_key = api_key
        self.api_url = kwargs.get('api_url', Settings.GEMINI_API_URL)
        self.initialized = True

    def validate_api_key(self, api_key: str) -> bool:
        """Return True when the key can list models, False otherwise."""
        try:
            # Gemini authenticates via a query-string key.
            url = f"{self.api_url}/models?key={api_key}"
            response = requests.get(url, timeout=self._REQUEST_TIMEOUT)
            return response.status_code == 200
        except Exception:
            # Network problems count as "invalid" instead of crashing the UI.
            return False

    def get_available_models(self) -> List[Dict[str, str]]:
        """Return Gemini models as [{'id': ..., 'name': ...}, ...].

        Falls back to _FALLBACK_MODELS when the catalogue request fails
        or yields no Gemini entries.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        try:
            url = f"{self.api_url}/models?key={self.api_key}"
            response = requests.get(url, timeout=self._REQUEST_TIMEOUT)

            if response.status_code != 200:
                raise ValueError(f"Failed to get models: {response.text}")

            data = response.json()

            # Keep only Gemini-family models; ids arrive as "models/<id>".
            models = []
            for model in data.get('models', []):
                if 'gemini' in model.get('name', '').lower():
                    model_id = model.get('name').split('/')[-1]
                    models.append({
                        'id': model_id,
                        'name': self._format_model_name(model_id)
                    })

            if not models:
                return [dict(m) for m in self._FALLBACK_MODELS]
            return models
        except Exception:
            # Return fresh copies so callers cannot mutate the fallback.
            return [dict(m) for m in self._FALLBACK_MODELS]

    def generate_rule(
        self,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Generate a Cursor Rule via Gemini's generateContent endpoint.

        Falls back to _create_basic_rule() on any API failure or empty
        response, so callers always receive MDC text.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        if parameters is None:
            parameters = {}

        temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
        globs = parameters.get('globs', '')
        referenced_files = parameters.get('referenced_files', '')
        prompt = parameters.get('prompt', '')

        # This endpoint takes no separate system role here, so the system
        # and user prompts are concatenated into a single user turn below.
        system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.

MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---

- Use our internal RPC pattern when defining services
- Always use snake_case for service names.

@service-template.ts
"""

        user_prompt = f"""
Create a Cursor Rule with the following details:

Rule Type: {rule_type}
Description: {description}
Content: {content}
"""

        if globs:
            user_prompt += f"\nGlobs: {globs}"

        if referenced_files:
            user_prompt += f"\nReferenced Files: {referenced_files}"

        if prompt:
            user_prompt += f"\nAdditional Instructions: {prompt}"

        url = f"{self.api_url}/models/{model}:generateContent?key={self.api_key}"

        payload = {
            "contents": [
                {
                    "role": "user",
                    "parts": [
                        {"text": system_prompt + "\n\n" + user_prompt}
                    ]
                }
            ],
            "generationConfig": {
                "temperature": temperature,
                "topP": 0.8,
                "topK": 40,
                "maxOutputTokens": 2048
            }
        }

        try:
            response = requests.post(url, json=payload, timeout=self._REQUEST_TIMEOUT)

            if response.status_code != 200:
                raise ValueError(f"Failed to generate rule: {response.text}")

            data = response.json()

            # Candidate text lives at candidates[0].content.parts[0].text.
            generated_text = data.get('candidates', [{}])[0].get('content', {}).get('parts', [{}])[0].get('text', '')

            if not generated_text:
                return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

            return generated_text
        except Exception:
            # Best-effort: degrade to a template rule instead of raising.
            return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

    def _format_model_name(self, model_id: str) -> str:
        """Turn an id like 'gemini-2.0-flash' into 'Gemini 2.0 Flash'.

        The original chained four no-op str.replace() calls after the
        title-casing; they are removed as dead code (behavior unchanged).
        """
        return model_id.replace('-', ' ').title()

    def _create_basic_rule(
        self,
        rule_type: str,
        description: str,
        content: str,
        globs: str = '',
        referenced_files: str = ''
    ) -> str:
        """Create a basic rule in MDC format without using the LLM."""
        mdc = '---\n'
        mdc += f'description: {description}\n'

        # Glob patterns are only meaningful for Auto Attached rules.
        if rule_type == 'Auto Attached' and globs:
            mdc += f'globs: {globs}\n'

        if rule_type == 'Always':
            mdc += 'alwaysApply: true\n'
        else:
            mdc += 'alwaysApply: false\n'

        mdc += '---\n\n'
        mdc += content + '\n'

        if referenced_files:
            mdc += '\n' + referenced_files

        return mdc
292
-
293
- # OpenAI Adapter
294
class OpenAIAdapter(LLMAdapter):
    """Adapter for the OpenAI chat-completions API."""

    # Seconds before an HTTP request is abandoned. The original calls had
    # no timeout, which could hang the UI thread indefinitely.
    _REQUEST_TIMEOUT = 30

    # Catalogue served when the live model list cannot be fetched.
    _FALLBACK_MODELS = [
        {'id': 'gpt-4o', 'name': 'GPT-4o'},
        {'id': 'gpt-4-turbo', 'name': 'GPT-4 Turbo'},
        {'id': 'gpt-3.5-turbo', 'name': 'GPT-3.5 Turbo'}
    ]

    def __init__(self):
        """Create an unconfigured adapter; call initialize() before use."""
        self.api_key = None
        self.api_url = Settings.OPENAI_API_URL
        self.initialized = False

    def initialize(self, api_key: str, **kwargs) -> None:
        """Store the API key and an optional 'api_url' override."""
        self.api_key = api_key
        self.api_url = kwargs.get('api_url', Settings.OPENAI_API_URL)
        self.initialized = True

    def _auth_headers(self, api_key: str) -> Dict[str, str]:
        """Build the bearer-token header used by every OpenAI endpoint."""
        return {"Authorization": f"Bearer {api_key}"}

    def validate_api_key(self, api_key: str) -> bool:
        """Return True when the key can list models, False otherwise."""
        try:
            response = requests.get(
                f"{self.api_url}/models",
                headers=self._auth_headers(api_key),
                timeout=self._REQUEST_TIMEOUT
            )
            return response.status_code == 200
        except Exception:
            # Network problems count as "invalid" instead of crashing the UI.
            return False

    def get_available_models(self) -> List[Dict[str, str]]:
        """Return chat-capable models as [{'id': ..., 'name': ...}, ...].

        Falls back to _FALLBACK_MODELS when the catalogue request fails
        or yields no gpt-4/gpt-3.5 entries.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        try:
            response = requests.get(
                f"{self.api_url}/models",
                headers=self._auth_headers(self.api_key),
                timeout=self._REQUEST_TIMEOUT
            )

            if response.status_code != 200:
                raise ValueError(f"Failed to get models: {response.text}")

            data = response.json()

            # Keep only chat models (gpt-4* / gpt-3.5*).
            models = []
            for model in data.get('data', []):
                model_id = model.get('id')
                if any(prefix in model_id for prefix in ['gpt-4', 'gpt-3.5']):
                    models.append({
                        'id': model_id,
                        'name': self._format_model_name(model_id)
                    })

            if not models:
                return [dict(m) for m in self._FALLBACK_MODELS]
            return models
        except Exception:
            # Return fresh copies so callers cannot mutate the fallback.
            return [dict(m) for m in self._FALLBACK_MODELS]

    def generate_rule(
        self,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Generate a Cursor Rule via OpenAI chat completions.

        Falls back to _create_basic_rule() on any API failure or empty
        response, so callers always receive MDC text.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        if parameters is None:
            parameters = {}

        temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
        globs = parameters.get('globs', '')
        referenced_files = parameters.get('referenced_files', '')
        prompt = parameters.get('prompt', '')

        system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.

MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---

- Use our internal RPC pattern when defining services
- Always use snake_case for service names.

@service-template.ts
"""

        user_prompt = f"""
Create a Cursor Rule with the following details:

Rule Type: {rule_type}
Description: {description}
Content: {content}
"""

        if globs:
            user_prompt += f"\nGlobs: {globs}"

        if referenced_files:
            user_prompt += f"\nReferenced Files: {referenced_files}"

        if prompt:
            user_prompt += f"\nAdditional Instructions: {prompt}"

        url = f"{self.api_url}/chat/completions"
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

        payload = {
            "model": model,
            "messages": [
                {
                    "role": "system",
                    "content": system_prompt
                },
                {
                    "role": "user",
                    "content": user_prompt
                }
            ],
            "temperature": temperature,
            "max_tokens": 2048
        }

        try:
            response = requests.post(url, headers=headers, json=payload, timeout=self._REQUEST_TIMEOUT)

            if response.status_code != 200:
                raise ValueError(f"Failed to generate rule: {response.text}")

            data = response.json()

            # Completion text lives at choices[0].message.content.
            generated_text = data.get('choices', [{}])[0].get('message', {}).get('content', '')

            if not generated_text:
                return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

            return generated_text
        except Exception:
            # Best-effort: degrade to a template rule instead of raising.
            return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

    def _format_model_name(self, model_id: str) -> str:
        """Turn an id like 'gpt-4-turbo' into 'GPT-4 Turbo'.

        Fixes the original implementation, whose chained replaces (ending
        with name.replace('O', 'o')) lost the hyphen and lowercased every
        capital 'O', producing names like 'GPT 4o' instead of 'GPT-4o'.
        """
        for prefix, pretty in (
            ('gpt-4o', 'GPT-4o'),
            ('gpt-4', 'GPT-4'),
            ('gpt-3.5', 'GPT-3.5'),
            ('gpt-3', 'GPT-3'),
        ):
            if model_id.startswith(prefix):
                # Trailing id segments ('-turbo', '-mini', ...) become
                # space-separated title-cased words.
                return pretty + model_id[len(prefix):].replace('-', ' ').title()
        return model_id.replace('-', ' ').title()

    def _create_basic_rule(
        self,
        rule_type: str,
        description: str,
        content: str,
        globs: str = '',
        referenced_files: str = ''
    ) -> str:
        """Create a basic rule in MDC format without using the LLM."""
        mdc = '---\n'
        mdc += f'description: {description}\n'

        # Glob patterns are only meaningful for Auto Attached rules.
        if rule_type == 'Auto Attached' and globs:
            mdc += f'globs: {globs}\n'

        if rule_type == 'Always':
            mdc += 'alwaysApply: true\n'
        else:
            mdc += 'alwaysApply: false\n'

        mdc += '---\n\n'
        mdc += content + '\n'

        if referenced_files:
            mdc += '\n' + referenced_files

        return mdc
516
-
517
- # OpenRouter Adapter
518
class OpenRouterAdapter(LLMAdapter):
    """Adapter for the OpenRouter aggregation API."""

    # Seconds before an HTTP request is abandoned. The original calls had
    # no timeout, which could hang the UI thread indefinitely.
    _REQUEST_TIMEOUT = 30

    # Catalogue served when the live model list cannot be fetched.
    _FALLBACK_MODELS = [
        {'id': 'openai/gpt-4o', 'name': 'OpenAI GPT-4o'},
        {'id': 'anthropic/claude-3-opus', 'name': 'Anthropic Claude 3 Opus'},
        {'id': 'google/gemini-2.5-pro', 'name': 'Google Gemini 2.5 Pro'},
        {'id': 'meta-llama/llama-3-70b-instruct', 'name': 'Meta Llama 3 70B'}
    ]

    def __init__(self):
        """Create an unconfigured adapter; call initialize() before use."""
        self.api_key = None
        self.api_url = Settings.OPENROUTER_API_URL
        # Attribution defaults; initialize() may override them. Originally
        # these were assigned only in initialize(), leaving a partially
        # constructed adapter without the attributes at all.
        self.site_url = 'https://cursor-rules-generator.example.com'
        self.site_name = 'Cursor Rules Generator'
        self.initialized = False

    def initialize(self, api_key: str, **kwargs) -> None:
        """Store the API key plus optional api_url/site_url/site_name."""
        self.api_key = api_key
        self.api_url = kwargs.get('api_url', Settings.OPENROUTER_API_URL)
        self.site_url = kwargs.get('site_url', 'https://cursor-rules-generator.example.com')
        self.site_name = kwargs.get('site_name', 'Cursor Rules Generator')
        self.initialized = True

    def validate_api_key(self, api_key: str) -> bool:
        """Return True when the key can list models, False otherwise."""
        try:
            response = requests.get(
                f"{self.api_url}/models",
                headers={"Authorization": f"Bearer {api_key}"},
                timeout=self._REQUEST_TIMEOUT
            )
            return response.status_code == 200
        except Exception:
            # Network problems count as "invalid" instead of crashing the UI.
            return False

    def get_available_models(self) -> List[Dict[str, str]]:
        """Return OpenRouter models as [{'id': ..., 'name': ...}, ...].

        Falls back to _FALLBACK_MODELS when the catalogue request fails
        or yields no usable entries.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        try:
            response = requests.get(
                f"{self.api_url}/models",
                headers={"Authorization": f"Bearer {self.api_key}"},
                timeout=self._REQUEST_TIMEOUT
            )

            if response.status_code != 200:
                raise ValueError(f"Failed to get models: {response.text}")

            data = response.json()

            models = []
            for model in data.get('data', []):
                model_id = model.get('id')
                model_name = model.get('name', model_id)

                # NOTE(review): assumes the /models payload exposes
                # capabilities.chat — if the field is absent, every live
                # model is skipped and the fallback list is used; verify
                # against the current OpenRouter schema.
                if not model.get('capabilities', {}).get('chat'):
                    continue

                models.append({
                    'id': model_id,
                    'name': model_name
                })

            if not models:
                return [dict(m) for m in self._FALLBACK_MODELS]
            return models
        except Exception:
            # Return fresh copies so callers cannot mutate the fallback.
            return [dict(m) for m in self._FALLBACK_MODELS]

    def generate_rule(
        self,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Generate a Cursor Rule via OpenRouter chat completions.

        Falls back to _create_basic_rule() on any API failure or empty
        response, so callers always receive MDC text.

        Raises:
            ValueError: If the adapter has not been initialized.
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        if parameters is None:
            parameters = {}

        temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
        globs = parameters.get('globs', '')
        referenced_files = parameters.get('referenced_files', '')
        prompt = parameters.get('prompt', '')

        system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.

MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---

- Use our internal RPC pattern when defining services
- Always use snake_case for service names.

@service-template.ts
"""

        user_prompt = f"""
Create a Cursor Rule with the following details:

Rule Type: {rule_type}
Description: {description}
Content: {content}
"""

        if globs:
            user_prompt += f"\nGlobs: {globs}"

        if referenced_files:
            user_prompt += f"\nReferenced Files: {referenced_files}"

        if prompt:
            user_prompt += f"\nAdditional Instructions: {prompt}"

        url = f"{self.api_url}/chat/completions"
        # HTTP-Referer / X-Title are OpenRouter's app-attribution headers.
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "HTTP-Referer": self.site_url,
            "X-Title": self.site_name
        }

        payload = {
            "model": model,
            "messages": [
                {
                    "role": "system",
                    "content": system_prompt
                },
                {
                    "role": "user",
                    "content": user_prompt
                }
            ],
            "temperature": temperature,
            "max_tokens": 2048
        }

        try:
            response = requests.post(url, headers=headers, json=payload, timeout=self._REQUEST_TIMEOUT)

            if response.status_code != 200:
                raise ValueError(f"Failed to generate rule: {response.text}")

            data = response.json()

            # Completion text lives at choices[0].message.content.
            generated_text = data.get('choices', [{}])[0].get('message', {}).get('content', '')

            if not generated_text:
                return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

            return generated_text
        except Exception:
            # Best-effort: degrade to a template rule instead of raising.
            return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

    def _create_basic_rule(
        self,
        rule_type: str,
        description: str,
        content: str,
        globs: str = '',
        referenced_files: str = ''
    ) -> str:
        """Create a basic rule in MDC format without using the LLM."""
        mdc = '---\n'
        mdc += f'description: {description}\n'

        # Glob patterns are only meaningful for Auto Attached rules.
        if rule_type == 'Auto Attached' and globs:
            mdc += f'globs: {globs}\n'

        if rule_type == 'Always':
            mdc += 'alwaysApply: true\n'
        else:
            mdc += 'alwaysApply: false\n'

        mdc += '---\n\n'
        mdc += content + '\n'

        if referenced_files:
            mdc += '\n' + referenced_files

        return mdc
735
-
736
- # LLM Adapter Factory
737
class LLMAdapterFactory:
    """Factory that maps provider ids to their adapter classes."""

    @staticmethod
    def create_adapter(provider_name: str) -> LLMAdapter:
        """Instantiate the adapter registered for *provider_name*.

        Matching is case-insensitive.

        Raises:
            ValueError: If the provider is not supported.
        """
        key = provider_name.lower()
        registry = {
            "gemini": GeminiAdapter,
            "openai": OpenAIAdapter,
            "openrouter": OpenRouterAdapter,
        }
        adapter_cls = registry.get(key)
        if adapter_cls is None:
            raise ValueError(f"Unsupported provider: {key}")
        return adapter_cls()

    @staticmethod
    def get_supported_providers() -> Dict[str, str]:
        """Map each provider id to its human-readable display name."""
        return {
            "gemini": "Google Gemini",
            "openai": "OpenAI",
            "openrouter": "OpenRouter"
        }
762
-
763
- # Rule Generator
764
class RuleGenerator:
    """Engine for generating Cursor Rules."""

    def __init__(self):
        """Create a generator backed by the provider-adapter factory."""
        self.factory = LLMAdapterFactory()

    def create_rule(
        self,
        provider: str,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        api_key: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Create a Cursor Rule using the specified LLM provider.

        Any failure along the adapter path degrades to a locally built
        template rule, so the caller always receives MDC text.
        """
        params = {} if parameters is None else parameters

        try:
            adapter = self.factory.create_adapter(provider)
            adapter.initialize(api_key)
            return adapter.generate_rule(model, rule_type, description, content, params)
        except Exception:
            # Best-effort fallback: never propagate provider errors upward.
            return self._create_basic_rule(rule_type, description, content, params)

    def _create_basic_rule(
        self,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Create a basic rule in MDC format without using an LLM."""
        params = {} if parameters is None else parameters
        glob_patterns = params.get('globs', '')
        file_refs = params.get('referenced_files', '')

        # Front-matter header, then a blank line, then the rule body.
        header = ['---', f'description: {description}']
        if rule_type == 'Auto Attached' and glob_patterns:
            header.append(f'globs: {glob_patterns}')
        header.append('alwaysApply: true' if rule_type == 'Always' else 'alwaysApply: false')
        header.append('---')

        mdc = '\n'.join(header) + '\n\n' + content + '\n'
        if file_refs:
            mdc += '\n' + file_refs
        return mdc

    def validate_rule_type(self, rule_type: str) -> bool:
        """Return True when *rule_type* is one of the four supported kinds."""
        return rule_type in ('Always', 'Auto Attached', 'Agent Requested', 'Manual')

    def get_rule_types(self) -> List[Dict[str, str]]:
        """Return the supported rule types with their descriptions."""
        catalogue = (
            ('Always', 'Always included in the model context'),
            ('Auto Attached', 'Included when files matching glob patterns are referenced'),
            ('Agent Requested', 'Rule is presented to the AI, which decides whether to include it'),
            ('Manual', 'Only included when explicitly referenced using @ruleName'),
        )
        return [
            {'id': type_id, 'name': type_id, 'description': blurb}
            for type_id, blurb in catalogue
        ]
865
-
866
# Initialize components
# Module-level singletons shared by every Gradio callback below.
rule_generator = RuleGenerator()
factory = LLMAdapterFactory()

# Get supported providers
# Provider dropdown choices are the factory's registry keys ("gemini", ...).
providers = factory.get_supported_providers()
provider_choices = list(providers.keys())

# Get rule types
# Rule-type ids double as both dropdown values and display labels.
rule_types = rule_generator.get_rule_types()
rule_type_choices = [rt['id'] for rt in rule_types]
877
-
878
def validate_api_key(provider, api_key):
    """Validate an API key and load the provider's model catalogue.

    Args:
        provider: The LLM provider id ("gemini", "openai", "openrouter").
        api_key: The API key to validate.

    Returns:
        tuple: (status_message, dropdown_update, model_ids) — one value
        per Gradio output bound to this callback (api_status,
        model_dropdown, model_ids). The original returned a leading
        "success" bool as a fourth value, which did not match the three
        bound outputs, and sent a raw list to the dropdown (which sets
        its value, not its choices).
    """
    empty_dropdown = gr.update(choices=[], value=None, interactive=False)

    if not provider or not api_key:
        return "Lütfen bir sağlayıcı seçin ve API anahtarı girin.", empty_dropdown, []

    try:
        adapter = factory.create_adapter(provider)

        if not adapter.validate_api_key(api_key):
            return "Geçersiz API anahtarı. Lütfen geçerli bir anahtar girin.", empty_dropdown, []

        adapter.initialize(api_key)

        models = adapter.get_available_models()
        model_names = [model['name'] for model in models]
        model_ids = [model['id'] for model in models]

        # Display names go to the dropdown; the parallel id list is kept
        # in session state for the generate callback.
        return (
            "API anahtarı doğrulandı.",
            gr.update(choices=model_names, value=None, interactive=True),
            model_ids,
        )
    except Exception as e:
        # Adapter/network failures surface as a status message instead of
        # crashing the UI callback.
        return f"Hata: {str(e)}", empty_dropdown, []
912
-
913
- def generate_rule(provider, api_key, model_index, model_ids, rule_type, description, content, globs, referenced_files, prompt, temperature):
914
- """Generate a Cursor Rule.
915
-
916
- Args:
917
- provider: The LLM provider
918
- api_key: The API key for the provider
919
- model_index: The index of the selected model
920
- model_ids: The list of model IDs
921
- rule_type: The type of rule to generate
922
- description: A short description of the rule's purpose
923
- content: The main content of the rule
924
- globs: Glob patterns for Auto Attached rules
925
- referenced_files: Referenced files
926
- prompt: Additional instructions for the LLM
927
- temperature: Temperature parameter for generation
928
-
929
- Returns:
930
- tuple: (success, message, rule)
931
- """
932
- if not provider or not api_key or model_index is None or not rule_type or not description or not content:
933
- return False, "Lütfen tüm gerekli alanları doldurun.", ""
934
-
935
- # Get the model ID
936
- if not model_ids or model_index >= len(model_ids):
937
- return False, "Geçersiz model seçimi.", ""
938
-
939
- model = model_ids[model_index]
940
-
941
- # Validate rule type
942
- if not rule_generator.validate_rule_type(rule_type):
943
- return False, f"Geçersiz kural tipi: {rule_type}", ""
944
-
945
- # Validate globs for Auto Attached rule type
946
- if rule_type == 'Auto Attached' and not globs:
947
- return False, "Auto Attached kural tipi için glob desenleri gereklidir.", ""
948
-
949
- try:
950
- # Prepare parameters
951
- parameters = {
952
- 'globs': globs,
953
- 'referenced_files': referenced_files,
954
- 'prompt': prompt,
955
- 'temperature': float(temperature)
956
- }
957
-
958
- # Generate the rule
959
- rule = rule_generator.create_rule(
960
- provider=provider,
961
- model=model,
962
- rule_type=rule_type,
963
- description=description,
964
- content=content,
965
- api_key=api_key,
966
- parameters=parameters
967
- )
968
-
969
- return True, "Kural başarıyla oluşturuldu.", rule
970
- except Exception as e:
971
- return False, f"Kural oluşturulurken bir hata oluştu: {str(e)}", ""
972
-
973
def update_rule_type_info(rule_type):
    """Return the Turkish help text for the selected rule type.

    Args:
        rule_type: The selected rule type.

    Returns:
        str: Information about the selected rule type, or an empty string
        for an unknown type.
    """
    info_by_type = {
        'Always': "Her zaman model bağlamına dahil edilir.",
        'Auto Attached': "Glob desenine uyan dosyalar referans alındığında dahil edilir.",
        'Agent Requested': "Kural AI'ya sunulur, dahil edilip edilmeyeceğine AI karar verir.",
        'Manual': "Yalnızca @ruleName kullanılarak açıkça belirtildiğinde dahil edilir.",
    }
    return info_by_type.get(rule_type, "")
992
-
993
def update_globs_visibility(rule_type):
    """Show the globs input only for 'Auto Attached' rules.

    Args:
        rule_type: The selected rule type.

    Returns:
        A Gradio component update toggling the globs textbox's
        visibility. The original returned a bare bool; bound to a Textbox
        output, Gradio assigns that to the textbox *value* ("True") and
        the field never becomes visible.
    """
    return gr.update(visible=(rule_type == 'Auto Attached'))
1003
-
1004
# Create Gradio interface
# Two-column layout: inputs/credentials on the left, generated rule on
# the right. All event wiring is declared at the bottom of the block.
with gr.Blocks(title="Cursor Rules Oluşturucu") as demo:
    gr.Markdown("# Cursor Rules Oluşturucu")
    gr.Markdown("Gemini, OpenRouter, OpenAI API ve tüm modellerini destekleyen dinamik bir Cursor Rules oluşturucu.")

    with gr.Row():
        # Left column: provider credentials and all rule inputs.
        with gr.Column():
            provider = gr.Dropdown(
                choices=provider_choices,
                label="LLM Sağlayıcı",
                value=provider_choices[0] if provider_choices else None
            )

            # Keys are entered per session and never persisted server-side.
            api_key = gr.Textbox(
                label="API Anahtarı",
                placeholder="API anahtarınızı girin",
                type="password"
            )

            validate_btn = gr.Button("API Anahtarını Doğrula")

            api_status = gr.Textbox(
                label="API Durumu",
                interactive=False
            )

            # Populated by validate_api_key after a successful key check.
            model_dropdown = gr.Dropdown(
                label="Model",
                choices=[],
                interactive=False
            )

            # Hidden field to store model IDs
            # (session state kept parallel to the dropdown's display names).
            model_ids = gr.State([])

            rule_type = gr.Dropdown(
                choices=rule_type_choices,
                label="Kural Tipi",
                value=rule_type_choices[0] if rule_type_choices else None
            )

            rule_type_info = gr.Textbox(
                label="Kural Tipi Bilgisi",
                interactive=False,
                value=update_rule_type_info(rule_type_choices[0] if rule_type_choices else "")
            )

            description = gr.Textbox(
                label="Açıklama",
                placeholder="Kuralın amacını açıklayan kısa bir açıklama"
            )

            # Hidden unless the 'Auto Attached' type is chosen (see the
            # rule_type.change handler below).
            globs = gr.Textbox(
                label="Glob Desenleri (Auto Attached için)",
                placeholder="Örn: *.ts, src/*.js",
                visible=False
            )

            content = gr.Textbox(
                label="Kural İçeriği",
                placeholder="Kuralın ana içeriği",
                lines=10
            )

            referenced_files = gr.Textbox(
                label="Referans Dosyaları (İsteğe bağlı)",
                placeholder="Her satıra bir dosya adı girin, örn: @service-template.ts",
                lines=3
            )

            prompt = gr.Textbox(
                label="AI Prompt (İsteğe bağlı)",
                placeholder="AI'ya özel talimatlar verin",
                lines=3
            )

            temperature = gr.Slider(
                label="Sıcaklık",
                minimum=0.0,
                maximum=1.0,
                value=0.7,
                step=0.1
            )

            generate_btn = gr.Button("Kural Oluştur")

        # Right column: generation status and the produced rule text.
        with gr.Column():
            generation_status = gr.Textbox(
                label="Durum",
                interactive=False
            )

            rule_output = gr.Textbox(
                label="Oluşturulan Kural",
                lines=20,
                interactive=False
            )

            download_btn = gr.Button("İndir")

    # API key validation
    # NOTE(review): validate_api_key returns four values but only three
    # outputs are bound here — Gradio raises on arity mismatch; confirm.
    validate_btn.click(
        fn=validate_api_key,
        inputs=[provider, api_key],
        outputs=[api_status, model_dropdown, model_ids]
    )

    # Rule type change
    rule_type.change(
        fn=update_rule_type_info,
        inputs=[rule_type],
        outputs=[rule_type_info]
    )

    # NOTE(review): update_globs_visibility returns a bool; bound to a
    # Textbox output this sets the *value*, not visibility — confirm.
    rule_type.change(
        fn=update_globs_visibility,
        inputs=[rule_type],
        outputs=[globs]
    )

    # Generate rule
    # NOTE(review): generate_rule returns three values (success, message,
    # rule) while only two outputs are bound — confirm.
    generate_btn.click(
        fn=generate_rule,
        inputs=[
            provider,
            api_key,
            model_dropdown,
            model_ids,
            rule_type,
            description,
            content,
            globs,
            referenced_files,
            prompt,
            temperature
        ],
        outputs=[generation_status, rule_output]
    )

    # Download rule
    def download_rule(rule, description):
        # Build an .mdc download payload from the generated rule text;
        # returns None when there is nothing to download.
        if not rule:
            return None

        # Create file name from description
        file_name = description.lower().replace(" ", "-").replace("/", "-")
        if not file_name:
            file_name = "cursor-rule"

        # NOTE(review): gr.File expects a file path (or file object); this
        # dict payload, together with the gr.File() component created
        # inline in outputs below, likely does not produce a working
        # download — verify against the installed Gradio version.
        return {
            "name": f"{file_name}.mdc",
            "data": rule
        }

    download_btn.click(
        fn=download_rule,
        inputs=[rule_output, description],
        outputs=[gr.File()]
    )
1163
-
1164
# Launch the app
# Entry point when run directly (as Hugging Face Spaces does).
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces (required inside the Spaces container)
        server_port=int(os.environ.get("PORT", 7860)),  # honor platform-assigned port, default 7860
        share=True  # NOTE(review): share tunnels are unnecessary/ignored on HF Spaces — confirm
    )