# app.py
from flask import Flask, request, render_template, send_file
from parser import parse_python_code
import os
import sys  # sys.executable locates the current interpreter for subprocess calls
import json
import io
import subprocess  # To call process_hf_dataset.py
from database import init_chromadb, store_program, query_programs, load_chromadb_from_hf, DB_NAME
import logging

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# User-configurable variables
UPLOAD_DIR = "./uploads"  # Directory for uploaded files
os.makedirs(UPLOAD_DIR, exist_ok=True)  # Ensure it exists even when run under a WSGI server

app = Flask(__name__)

def reconstruct_code(parts):
    """Reconstruct the original code from parsed parts."""
    sorted_parts = sorted(parts, key=lambda p: p['location'][0])
    return ''.join(part['source'] for part in sorted_parts)
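
# The routes below assume parse_python_code(code) returns (parts, sequence),
# where sequence is the ordered list of category names and each part is a dict
# shaped roughly like this (illustrative values; the real fields live in parser.py):
#
#   {
#       'source':   "def add(a, b):\n    return a + b\n",  # raw source slice
#       'location': (12, 13),             # (start_line, end_line) -- assumed meaning
#       'category': 'function',           # one of the categories listed further below
#       'vector':   [0.1, 1, 0.0, 0.25],  # numeric features; layout assumed
#   }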

@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST':
        parts = None
        filename = 'unnamed.py'
        code_input = None
        query_results = None

        # Handle file upload or pasted code (parsing)
        if 'file' in request.files and request.files['file'].filename:
            file = request.files['file']
            if not file.filename.endswith('.py'):
                return 'Invalid file type. Please upload a Python file.', 400
            filename = file.filename
            file_path = os.path.join(UPLOAD_DIR, filename)
            file.save(file_path)
            with open(file_path, 'r', encoding='utf-8') as f:
                code_input = f.read()
            parts, sequence = parse_python_code(code_input)
            # Store in ChromaDB
            client = init_chromadb()
            vectors = [part['vector'] for part in parts]
            store_program(client, code_input, sequence, vectors, DB_NAME)
            logger.info(f"Stored code: {filename}")
        elif 'code' in request.form and request.form['code'].strip():
            code_input = request.form['code']
            filename = request.form.get('filename', 'unnamed.py') or 'unnamed.py'
            if not filename.endswith('.py'):
                filename += '.py'
            parts, sequence = parse_python_code(code_input)
            vectors = [part['vector'] for part in parts]
            client = init_chromadb()
            store_program(client, code_input, sequence, vectors, DB_NAME)
            logger.info(f"Stored code: {filename}")
        elif 'query_ops' in request.form and request.form['query_ops'].strip():
            # Handle query for operations (category sequence)
            operations = [op.strip() for op in request.form['query_ops'].split(',')]
            client = load_chromadb_from_hf()
            query_results = query_programs(client, operations, DB_NAME)
            logger.info(f"Queried operations: {operations}")
            return render_template(
                'results_partial.html',
                parts=None,
                filename=filename,
                reconstructed_code=None,
                code_input=None,
                query_results=query_results
            )
        elif 'semantic_query' in request.form and request.form['semantic_query'].strip():
            # Handle semantic query (natural language description)
            semantic_query = request.form['semantic_query']
            client = load_chromadb_from_hf()
            query_results = query_programs(client, None, DB_NAME, semantic_query=semantic_query)
            logger.info(f"Queried semantically: {semantic_query}")
            return render_template(
                'results_partial.html',
                parts=None,
                filename=filename,
                reconstructed_code=None,
                code_input=None,
                query_results=query_results
            )
        elif 'process_hf' in request.form:
            # Trigger processing of Hugging Face dataset with fresh database
            try:
                # Reset ChromaDB collection
                client = init_chromadb()
                try:
                    client.delete_collection(DB_NAME)
                    logger.info(f"Deleted ChromaDB collection: {DB_NAME}")
                except Exception as e:
                    logger.warning(f"Failed to delete collection {DB_NAME}: {e}")
                collection = client.create_collection(DB_NAME)
                logger.info(f"Created fresh ChromaDB collection: {DB_NAME}")
                
                # Process dataset
                result = subprocess.run(
                    [sys.executable, 'process_hf_dataset.py'],
                    check=True, capture_output=True, text=True,
                    cwd=os.path.dirname(os.path.abspath(__file__))
                )
                logger.info(f"Process Hugging Face dataset output: {result.stdout}")
                if result.stderr:
                    logger.error(f"Process Hugging Face dataset errors: {result.stderr}")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Hugging Face Dataset Processed",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Hugging Face dataset processed and stored successfully with fresh database."
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Error processing Hugging Face dataset: {e.stderr}")
                return f"Error processing Hugging Face dataset: {e.stderr}", 500
        elif 'load_dataset' in request.form:
            # Trigger loading of Hugging Face dataset without resetting
            try:
                # Check if collection exists, get or create if needed
                client = init_chromadb()
                collection = client.get_or_create_collection(DB_NAME)
                logger.info(f"Using existing or new ChromaDB collection: {DB_NAME}")
                
                # Process dataset
                result = subprocess.run(
                    [sys.executable, 'process_hf_dataset.py'],
                    check=True, capture_output=True, text=True,
                    cwd=os.path.dirname(os.path.abspath(__file__))
                )
                logger.info(f"Load Hugging Face dataset output: {result.stdout}")
                if result.stderr:
                    logger.error(f"Load Hugging Face dataset errors: {result.stderr}")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Hugging Face Dataset Loaded",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Hugging Face dataset loaded and stored successfully."
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Error loading Hugging Face dataset: {e.stderr}")
                return f"Error loading Hugging Face dataset: {e.stderr}", 500
        elif 'reset_db' in request.form:
            # Reset ChromaDB collection (no repopulation with samples)
            try:
                client = init_chromadb()
                try:
                    client.delete_collection(DB_NAME)
                    logger.info(f"Deleted ChromaDB collection: {DB_NAME}")
                except Exception as e:
                    logger.warning(f"Failed to delete collection {DB_NAME}: {e}")
                collection = client.create_collection(DB_NAME)
                logger.info(f"Created fresh ChromaDB collection: {DB_NAME}")
                # Sanity-check the new collection object (guards against the
                # 'NoneType' error seen when creation silently fails)
                if collection is None or not hasattr(collection, 'add'):
                    raise ValueError("ChromaDB collection creation failed")
                return render_template(
                    'results_partial.html',
                    parts=None,
                    filename="Database Reset",
                    reconstructed_code=None,
                    code_input=None,
                    query_results=None,
                    message="Database reset successfully."
                )
            except Exception as e:
                logger.error(f"Error resetting database: {e}")
                return f"Error resetting database: {e}", 500

        if parts:
            indexed_parts = [{'index': i + 1, **part} for i, part in enumerate(parts)]
            reconstructed_code = reconstruct_code(indexed_parts)
            return render_template(
                'results_partial.html',
                parts=indexed_parts,
                filename=filename,
                reconstructed_code=reconstructed_code,
                code_input=code_input,
                query_results=None
            )
        return 'No file, code, or query provided', 400

    # Initial page load (the database stays empty until populated via the buttons)
    logger.info("Index page requested; database is only populated via the UI buttons")
    return render_template('index.html', parts=None, filename=None, reconstructed_code=None, code_input=None, query_results=None)

@app.route('/export_json', methods=['POST'])
def export_json():
    payload = request.get_json(silent=True) or {}  # tolerate missing/non-JSON bodies
    parts = payload.get('parts', [])
    export_data = [
        {
            'vector': part['vector'],
            'source': part['source'],
            'description': generate_description_tokens([part['category']], [part['vector']]),
        }
        for part in parts
    ]
    json_str = json.dumps(export_data, indent=2)
    buffer = io.BytesIO(json_str.encode('utf-8'))
    buffer.seek(0)
    return send_file(
        buffer,
        as_attachment=True,
        download_name='code_vectors.json',
        mimetype='application/json'
    )
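
# Illustrative request for /export_json (values are placeholders; the real
# part dicts come from the parse results rendered on the index page):
#
#   curl -X POST http://localhost:7860/export_json \
#        -H 'Content-Type: application/json' \
#        -d '{"parts": [{"category": "function", "source": "def f(): pass", "vector": [0.1, 1, 0.0, 0.25]}]}'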

def generate_description_tokens(sequence, vectors):
    """Generate semantic description tokens for a program based on its sequence and vectors."""
    tokens = []
    category_descriptions = {
        'import': 'imports module',
        'function': 'defines function',
        'assigned_variable': 'assigns variable',
        'input_variable': 'input parameter',
        'returned_variable': 'returns value',
        'if': 'conditional statement',
        'return': 'returns result',
        'try': 'try block',
        'except': 'exception handler',
        'expression': 'expression statement',
        'spacer': 'empty line or comment'
    }
    
    for cat, vec in zip(sequence, vectors):
        if cat in category_descriptions:
            tokens.append(f"{category_descriptions[cat]}:{cat}")
            # Vector-derived feature tokens; index 1 is assumed to be the nesting
            # level and index 3 the normalized span (layout defined in parser.py)
            tokens.append(f"level:{vec[1]}")
            tokens.append(f"span:{vec[3]:.2f}")
    return " ".join(tokens)

if __name__ == '__main__':
    app.run(host="0.0.0.0", port=7860)  # Bind to all interfaces for Hugging Face Spaces
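
# Local development (assumed workflow; port 7860 matches Hugging Face Spaces):
#   $ python app.py
#   $ curl -F 'file=@example.py' http://localhost:7860/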