# app.py
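"""Flask front end for a Python-code vector store.

Parsed code parts are embedded and stored in ChromaDB; the index page's form
buttons trigger category and semantic queries, Hugging Face dataset
processing/loading (via process_hf_dataset.py), and database resets, and
/export_json exports parsed parts (vector, source, description tokens) as JSON.
"""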
from flask import Flask, request, render_template, jsonify, send_file
from parser import parse_python_code
import os
import json
import io
import subprocess  # To call process_hf_dataset.py
from database import init_chromadb, store_program, query_programs, load_chromadb_from_hf, DB_NAME, create_collection, save_chromadb_to_hf
import logging
from datasets import Dataset

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# User-configurable variables
UPLOAD_DIR = "./uploads"  # Directory for uploads
# HF_DATASET_NAME was referenced below but never defined; the default repo id
# here is a placeholder (assumption) and must match the dataset used by
# process_hf_dataset.py and database.py.
HF_DATASET_NAME = os.getenv("HF_DATASET_NAME", "user/python-code-parts")

os.makedirs(UPLOAD_DIR, exist_ok=True)  # Ensure the upload dir exists even under a WSGI server

app = Flask(__name__)

def reconstruct_code(parts):
    """Reconstruct the original code from parsed parts."""
    sorted_parts = sorted(parts, key=lambda p: p['location'][0])
    return ''.join(part['source'] for part in sorted_parts)
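
def parse_and_store(code_input, filename):
    """Parse code into parts and persist it to ChromaDB.

    Shared by the file-upload and pasted-code branches of index(). Each part is
    assumed (from its use in this file) to be a dict with at least 'vector',
    'source', 'category', and 'location' keys, as produced by parse_python_code().
    """
    parts, sequence = parse_python_code(code_input)
    client = init_chromadb()
    vectors = [part['vector'] for part in parts]
    store_program(client, code_input, sequence, vectors, DB_NAME)
    logger.info(f"Stored code: {filename}")
    # Verify storage
    collection = create_collection(client, DB_NAME)
    count = collection.count()
    logger.info(f"ChromaDB now contains {count} entries")
    return parts, sequence

def reset_chromadb_and_hf_dataset():
    """Delete and recreate the ChromaDB collection, then replace the Hugging
    Face dataset with an empty one; returns the fresh client.

    Shared by the 'process_hf' and 'reset_db' branches of index().
    """
    client = init_chromadb()
    try:
        client.delete_collection(DB_NAME)
        logger.info(f"Deleted ChromaDB collection: {DB_NAME}")
    except Exception as e:
        logger.warning(f"Failed to delete collection {DB_NAME}: {e}")
    collection = client.create_collection(DB_NAME)
    logger.info(f"Created fresh ChromaDB collection: {DB_NAME}")
    # Verify the collection was created and starts empty
    if collection is None or not hasattr(collection, 'add'):
        raise ValueError("ChromaDB collection creation failed")
    logger.info("Verified ChromaDB collection is valid")
    count = collection.count()
    logger.info(f"ChromaDB now contains {count} entries after reset (should be 0)")
    # Replace the Hugging Face dataset with an empty one (same schema as store_program)
    try:
        empty_data = {
            "code": [],
            "sequence": [],
            "vectors": [],
            "description_tokens": [],
            "program_vectors": []
        }
        Dataset.from_dict(empty_data).push_to_hub(HF_DATASET_NAME, token=os.getenv("HF_KEY"))
        logger.info(f"Replaced Hugging Face dataset {HF_DATASET_NAME} with empty dataset")
    except Exception as e:
        logger.error(f"Error replacing Hugging Face dataset: {e}")
        raise
    return client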

@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST':
        parts = None
        filename = 'unnamed.py'
        code_input = None
        query_results = None

        # Handle file upload or pasted code (parsing)
        if 'file' in request.files and request.files['file'].filename:
            file = request.files['file']
            if not file.filename.endswith('.py'):
                return 'Invalid file type. Please upload a Python file.', 400
            filename = file.filename
            file_path = os.path.join(UPLOAD_DIR, filename)
            file.save(file_path)
            with open(file_path, 'r', encoding='utf-8') as f:
                code_input = f.read()
            try:
                parts, sequence = parse_and_store(code_input, filename)
            except Exception as e:
                logger.error(f"Error storing code {filename}: {e}")
                return f"Error storing code: {e}", 500
        elif 'code' in request.form and request.form['code'].strip():
            code_input = request.form['code']
            filename = request.form.get('filename', 'unnamed.py') or 'unnamed.py'
            if not filename.endswith('.py'):
                filename += '.py'
            try:
                parts, sequence = parse_and_store(code_input, filename)
            except Exception as e:
                logger.error(f"Error storing code {filename}: {e}")
                return f"Error storing code: {e}", 500
        elif 'query_ops' in request.form and request.form['query_ops'].strip():
            # Handle query for operations (category sequence)
            operations = [op.strip() for op in request.form['query_ops'].split(',')]
            try:
                client = load_chromadb_from_hf()
                query_results = query_programs(client, operations, DB_NAME)
                logger.info(f"Queried operations: {operations}")
                # Verify query results
                logger.info(f"Found {len(query_results)} matching programs in ChromaDB")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename=filename,
                    reconstructed_code=None,
                    code_input=None,
                    query_results=query_results
                )
            except Exception as e:
                logger.error(f"Error querying operations: {e}")
                return f"Error querying operations: {e}", 500
        elif 'semantic_query' in request.form and request.form['semantic_query'].strip():
            # Handle semantic query (natural language description)
            semantic_query = request.form['semantic_query']
            try:
                client = load_chromadb_from_hf()
                query_results = query_programs(client, None, DB_NAME, semantic_query=semantic_query)
                logger.info(f"Queried semantically: {semantic_query}")
                # Verify query results
                logger.info(f"Found {len(query_results)} matching programs in ChromaDB")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename=filename,
                    reconstructed_code=None,
                    code_input=None,
                    query_results=query_results
                )
            except Exception as e:
                logger.error(f"Error querying semantically: {e}")
                return f"Error querying semantically: {e}", 500
        elif 'process_hf' in request.form:
            # Trigger processing of Hugging Face dataset with fresh database
            try:
                # Reset the ChromaDB collection and the Hugging Face dataset
                client = reset_chromadb_and_hf_dataset()

                # Process dataset
                result = subprocess.run(['python', 'process_hf_dataset.py'], check=True, capture_output=True, text=True, cwd=os.path.dirname(__file__))
                logger.info(f"Process Hugging Face dataset output: {result.stdout}")
                if result.stderr:
                    logger.error(f"Process Hugging Face dataset errors: {result.stderr}")
                # Verify database population
                collection = create_collection(client, DB_NAME)
                count = collection.count()
                logger.info(f"ChromaDB now contains {count} entries after processing")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Hugging Face Dataset Processed",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Hugging Face dataset processed and stored successfully with fresh database and empty dataset."
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Error processing Hugging Face dataset: {e.stderr}")
                return f"Error processing Hugging Face dataset: {e.stderr}", 500
            except Exception as e:
                logger.error(f"Unexpected error processing Hugging Face dataset: {e}")
                return f"Unexpected error processing Hugging Face dataset: {e}", 500
        elif 'load_dataset' in request.form:
            # Trigger loading of Hugging Face dataset without resetting
            try:
                # Check if collection exists, get or create if needed
                client = init_chromadb()
                collection = client.get_or_create_collection(DB_NAME)
                logger.info(f"Using existing or new ChromaDB collection: {DB_NAME}")
                # Verify collection
                if collection is None or not hasattr(collection, 'add'):
                    raise ValueError("ChromaDB collection access failed")
                logger.info("Verified ChromaDB collection is valid")
                # Verify collection state
                count = collection.count()
                logger.info(f"ChromaDB contains {count} entries before loading")
                
                # Process dataset
                result = subprocess.run(['python', 'process_hf_dataset.py'], check=True, capture_output=True, text=True, cwd=os.path.dirname(__file__))
                logger.info(f"Load Hugging Face dataset output: {result.stdout}")
                if result.stderr:
                    logger.error(f"Load Hugging Face dataset errors: {result.stderr}")
                # Verify database population
                collection = create_collection(client, DB_NAME)
                count = collection.count()
                logger.info(f"ChromaDB now contains {count} entries after loading")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Hugging Face Dataset Loaded",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Hugging Face dataset loaded and stored successfully."
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Error loading Hugging Face dataset: {e.stderr}")
                return f"Error loading Hugging Face dataset: {e.stderr}", 500
            except Exception as e:
                logger.error(f"Unexpected error loading Hugging Face dataset: {e}")
                return f"Unexpected error loading Hugging Face dataset: {e}", 500
        elif 'reset_db' in request.form:
            # Reset ChromaDB collection and Hugging Face dataset (no repopulation with samples)
            try:
                reset_chromadb_and_hf_dataset()

                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Database Reset",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Database and Hugging Face dataset reset successfully."
                )
            except Exception as e:
                logger.error(f"Error resetting database: {e}")
                return f"Error resetting database: {e}", 500

        if parts:
            indexed_parts = [{'index': i + 1, **part} for i, part in enumerate(parts)]
            reconstructed_code = reconstruct_code(indexed_parts)
            return render_template(
                'results_partial.html',
                parts=indexed_parts,
                filename=filename,
                reconstructed_code=reconstructed_code,
                code_input=code_input,
                query_results=None
            )
        return 'No file, code, or query provided', 400

    # Initial page load (start empty, no default population)
    logger.info("Application started, database empty until triggered by buttons")
    return render_template('index.html', parts=None, filename=None, reconstructed_code=None, code_input=None, query_results=None)

@app.route('/export_json', methods=['POST'])
def export_json():
    parts = (request.get_json(silent=True) or {}).get('parts', [])
    export_data = [
        {
            'vector': part['vector'],
            'source': part['source'],
            'description': generate_description_tokens([part['category']], [part['vector']])
        }
        for part in parts
    ]
    json_str = json.dumps(export_data, indent=2)
    buffer = io.BytesIO(json_str.encode('utf-8'))
    buffer.seek(0)
    return send_file(
        buffer,
        as_attachment=True,
        download_name='code_vectors.json',
        mimetype='application/json'
    )
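
# Example request (hypothetical payload, mirroring the part dicts produced by
# parse_python_code; the 'vector' layout shown is an assumption):
#   curl -X POST http://localhost:7860/export_json \
#        -H 'Content-Type: application/json' \
#        -d '{"parts": [{"vector": [1, 0, 0.0, 0.1], "source": "import os\n", "category": "import"}]}'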

def generate_description_tokens(sequence, vectors):
    """Generate semantic description tokens for a program based on its sequence and vectors."""
    tokens = []
    category_descriptions = {
        'import': 'imports module',
        'function': 'defines function',
        'assigned_variable': 'assigns variable',
        'input_variable': 'input parameter',
        'returned_variable': 'returns value',
        'if': 'conditional statement',
        'return': 'returns result',
        'try': 'try block',
        'except': 'exception handler',
        'expression': 'expression statement',
        'spacer': 'empty line or comment'
    }
    
    for cat, vec in zip(sequence, vectors):
        if cat in category_descriptions:
            tokens.append(f"{category_descriptions[cat]}:{cat}")
            # Add vector-derived features (e.g., level, span) as tokens
            tokens.append(f"level:{vec[1]}")
            tokens.append(f"span:{vec[3]:.2f}")
    return " ".join(tokens)
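
# Example (the vector layout [type, level, position, span] is an assumption
# inferred from the indices used above):
#   generate_description_tokens(['import', 'function'],
#                               [[1, 0, 0.0, 0.01], [2, 0, 0.1, 0.10]])
#   -> "imports module:import level:0 span:0.01 defines function:function level:0 span:0.10"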

if __name__ == '__main__':
    app.run(host="0.0.0.0", port=7860)  # Bind to all interfaces for Hugging Face Spaces