# app.py
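# Flask front-end: parses Python source into vectorized "parts" with
# parser.parse_python_code, stores programs in a ChromaDB collection that
# is synced with a Hugging Face dataset, and answers queries over the
# stored programs by operation sequence or by semantic description.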
from flask import Flask, request, render_template, jsonify, send_file
from parser import parse_python_code
from werkzeug.utils import secure_filename
import os
import json
import io
from database import init_chromadb, populate_sample_db, store_program, query_programs, load_chromadb_from_hf, HF_DATASET_NAME, HF_TOKEN, DB_NAME

# User-configurable variables
UPLOAD_DIR = "./uploads"  # Directory for uploads

app = Flask(__name__)

def reconstruct_code(parts):
    """Reconstruct the original code from parsed parts."""
    sorted_parts = sorted(parts, key=lambda p: p['location'][0])
    return ''.join(part['source'] for part in sorted_parts)
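
# Example (illustrative data, not actual parser output): given
#     [{'location': (3, 5), 'source': "def f():\n    pass\n"},
#      {'location': (1, 2), 'source': "import os\n"}]
# reconstruct_code sorts by the start of each part's 'location' and
# concatenates the sources, yielding "import os\ndef f():\n    pass\n".
# This assumes parse_python_code records (start, end) positions in
# 'location'; only location[0] is actually used for ordering.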

@app.route('/', methods=['GET', 'POST'])
def index():
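    """Main page.

    POST handles four mutually exclusive inputs: an uploaded .py file or
    pasted code (both are parsed and stored in ChromaDB), a comma-separated
    operation query ('query_ops'), or a natural-language 'semantic_query'
    (both run against the collection loaded from Hugging Face).
    GET loads the collection, seeding it with samples if empty, and renders
    the form.
    """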
    if request.method == 'POST':
        parts = None
        filename = 'unnamed.py'
        code_input = None
        query_results = None

        # Handle file upload or pasted code (parsing)
        if 'file' in request.files and request.files['file'].filename:
            file = request.files['file']
            if not file.filename.endswith('.py'):
                return 'Invalid file type. Please upload a Python file.', 400
            filename = secure_filename(file.filename)  # sanitize user-supplied name to avoid path traversal
            file_path = os.path.join(UPLOAD_DIR, filename)
            file.save(file_path)
            with open(file_path, 'r') as f:
                code_input = f.read()
            parts, sequence = parse_python_code(code_input)
            # Store in ChromaDB
            client = init_chromadb()
            vectors = [part['vector'] for part in parts]
            store_program(client, code_input, sequence, vectors, DB_NAME)
        elif 'code' in request.form and request.form['code'].strip():
            code_input = request.form['code']
            filename = request.form.get('filename', 'unnamed.py') or 'unnamed.py'
            if not filename.endswith('.py'):
                filename += '.py'
            parts, sequence = parse_python_code(code_input)
            vectors = [part['vector'] for part in parts]
            client = init_chromadb()
            store_program(client, code_input, sequence, vectors, DB_NAME)
        elif 'query_ops' in request.form and request.form['query_ops'].strip():
            # Handle query for operations (category sequence)
            operations = [op.strip() for op in request.form['query_ops'].split(',')]
            client = load_chromadb_from_hf(HF_DATASET_NAME, HF_TOKEN)  # Load from Hugging Face
            query_results = query_programs(client, operations, DB_NAME)
            return render_template(
                'results_partial.html',
                parts=None,
                filename=filename,
                reconstructed_code=None,
                code_input=None,
                query_results=query_results
            )
        elif 'semantic_query' in request.form and request.form['semantic_query'].strip():
            # Handle semantic query (natural language description)
            semantic_query = request.form['semantic_query']
            client = load_chromadb_from_hf(HF_DATASET_NAME, HF_TOKEN)  # Load from Hugging Face
            query_results = query_programs(client, None, DB_NAME, semantic_query=semantic_query)
            return render_template(
                'results_partial.html',
                parts=None,
                filename=filename,
                reconstructed_code=None,
                code_input=None,
                query_results=query_results
            )

        if parts:
            indexed_parts = [{'index': i + 1, **part} for i, part in enumerate(parts)]
            reconstructed_code = reconstruct_code(indexed_parts)
            return render_template(
                'results_partial.html',
                parts=indexed_parts,
                filename=filename,
                reconstructed_code=reconstructed_code,
                code_input=code_input,
                query_results=None
            )
        return 'No file, code, or query provided', 400

    # Initial page load
    client = load_chromadb_from_hf(HF_DATASET_NAME, HF_TOKEN)  # Load from Hugging Face on startup
    # If no dataset exists locally, populate with samples
    try:
        # The original first-collection check breaks when the collection list
        # is empty or ordered differently; test membership by name instead.
        if DB_NAME not in [c.name for c in client.list_collections()]:
            populate_sample_db(client)
    except Exception:
        populate_sample_db(client)
    return render_template('index.html', parts=None, filename=None, reconstructed_code=None, code_input=None, query_results=None)

@app.route('/export_json', methods=['POST'])
def export_json():
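    """Serve the parsed parts posted by the client as a downloadable JSON
    file of (vector, source, description-token) records."""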
    payload = request.get_json(silent=True) or {}  # tolerate missing/invalid JSON bodies
    parts = payload.get('parts', [])
    export_data = [{'vector': part['vector'], 'source': part['source'], 'description': generate_description_tokens([part['category']], [part['vector']])}
                   for part in parts]
    json_str = json.dumps(export_data, indent=2)
    buffer = io.BytesIO(json_str.encode('utf-8'))
    buffer.seek(0)
    return send_file(
        buffer,
        as_attachment=True,
        download_name='code_vectors.json',
        mimetype='application/json'
    )
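
# Example request against /export_json (a sketch; the payload shape is
# inferred from the handler above, which reads 'vector', 'source' and
# 'category' from each part):
#   curl -X POST http://localhost:7860/export_json \
#        -H 'Content-Type: application/json' \
#        -d '{"parts": [{"vector": [1, 0, 0, 0.02], "source": "import os\n", "category": "import"}]}' \
#        -o code_vectors.json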

def generate_description_tokens(sequence, vectors):
    """Generate semantic description tokens for a program based on its sequence and vectors."""
    tokens = []
    category_descriptions = {
        'import': 'imports module',
        'function': 'defines function',
        'assigned_variable': 'assigns variable',
        'input_variable': 'input parameter',
        'returned_variable': 'returns value',
        'if': 'conditional statement',
        'return': 'returns result',
        'try': 'try block',
        'except': 'exception handler',
        'expression': 'expression statement',
        'spacer': 'empty line or comment'
    }
    
    for cat, vec in zip(sequence, vectors):
        if cat in category_descriptions:
            tokens.append(f"{category_descriptions[cat]}:{cat}")
            # Add vector-derived features (e.g., level, span) as tokens
            tokens.append(f"level:{vec[1]}")
            tokens.append(f"span:{vec[3]:.2f}")
    return " ".join(tokens)
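
# Example (vector values are hypothetical; only vec[1], the "level", and
# vec[3], the "span", are read):
#   generate_description_tokens(['import', 'function'],
#                               [[0, 0, 0, 0.02], [1, 1, 0, 0.10]])
# returns:
#   "imports module:import level:0 span:0.02 defines function:function level:1 span:0.10"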

if __name__ == '__main__':
    os.makedirs(UPLOAD_DIR, exist_ok=True)
    app.run(host="0.0.0.0", port=7860)