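"""Web Datasets Bulk Downloader.

A Streamlit app that bulk-downloads the files linked from a web page into a
per-URL subdirectory (named by the URL's MD5 hash) and offers in-browser
viewing, editing, deletion, and download of the fetched files.
"""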
import streamlit as st
import requests
import os
import urllib
import base64
from bs4 import BeautifulSoup
import hashlib
import json

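# The app's own files; list_files() filters these out of every directory listing.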
EXCLUDED_FILES = ['app.py', 'requirements.txt', 'pre-requirements.txt', 'packages.txt', 'README.md', '.gitattributes', 'backup.py', 'Dockerfile']

# Create a history.json file if it doesn't exist yet
if not os.path.exists("history.json"):
    with open("history.json", "w") as f:
        json.dump({}, f)

def download_file(url, local_filename):
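    """Stream url to local_filename on disk.

    Returns the local path on success, or None if the URL is not HTTP(S)
    or the request fails.
    """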
    if url.startswith('http://') or url.startswith('https://'):
        try:
            with requests.get(url, stream=True) as r:
                r.raise_for_status()
                with open(local_filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
            return local_filename
        except requests.exceptions.RequestException as err:
            # Covers HTTP status errors as well as connection and timeout failures,
            # so one bad link cannot crash the whole download run.
            print(f"Download failed for {url}: {err}")

def download_html_and_files(url, subdir):
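    """Fetch url, download every linked file into subdir, rewrite the page's
    links to point at the local copies, and save the result as index.html."""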
    html_content = requests.get(url).text
    soup = BeautifulSoup(html_content, 'html.parser')

    for link in soup.find_all('a', href=True):
        # href=True skips anchors without an href (which would crash urljoin),
        # and resolving against the page URL itself handles path-relative links.
        file_url = urllib.parse.urljoin(url, link['href'])
        local_filename = os.path.join(subdir, urllib.parse.urlparse(file_url).path.split('/')[-1])

        # Skip if the local filename is a directory
        if not local_filename.endswith('/') and local_filename != subdir:
            link['href'] = local_filename
            download_file(file_url, local_filename)
    
    # Save the modified HTML content
    with open(os.path.join(subdir, "index.html"), "w") as file:
        file.write(str(soup))

def list_files(directory_path='.'):
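    """Return the plain files in directory_path, minus EXCLUDED_FILES."""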
    files = [f for f in os.listdir(directory_path) if os.path.isfile(os.path.join(directory_path, f))]
    return [f for f in files if f not in EXCLUDED_FILES]

def get_download_link(file):
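    """Return an HTML anchor embedding the file as a base64 data-URI download."""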
    with open(file, "rb") as f:
        data = f.read()  # 'data' avoids shadowing the built-in name 'bytes'
    b64 = base64.b64encode(data).decode()
    href = f'<a href="data:file/octet-stream;base64,{b64}" download=\'{os.path.basename(file)}\'>Click to download {os.path.basename(file)}</a>'
    return href

def file_expander(file_path):
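    """Render an expander for one file with edit, download, and delete controls."""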
    with st.expander(os.path.basename(file_path)):
        # Edit functionality
        file_content = ''
        if 'edit_content' in st.session_state and st.session_state['edit_content'][0] == file_path:
            file_content = st.session_state['edit_content'][1]
        else:
            # Downloaded files may be binary; replace undecodable bytes so
            # the expander does not crash on non-UTF-8 content.
            with open(file_path, "r", errors="replace") as f:
                file_content = f.read()
        
        edited_content = st.text_area("Edit file content:", value=file_content, height=250, key=f"textarea_{file_path}")
        if st.button("💾 Save", key=f"save_{file_path}"):
            with open(file_path, "w") as f:
                f.write(edited_content)
            st.success(f"File {os.path.basename(file_path)} saved!")
            st.session_state['edit_content'] = (file_path, edited_content)

        # Download link
        st.markdown(get_download_link(file_path), unsafe_allow_html=True)

        # Delete button
        if st.button(f"🗑️ Delete {os.path.basename(file_path)}", key=f"delete_{file_path}"):
            os.remove(file_path)
            st.success(f"File {os.path.basename(file_path)} deleted!")
            # Update the listing by removing the deleted file
            st.experimental_rerun()

def show_download_links(subdir):
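    """Render a file_expander for every non-excluded file in subdir."""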
    st.write(f'Files in {subdir}:')
    for file in list_files(subdir):
        file_path = os.path.join(subdir, file)
        if os.path.isfile(file_path):
            file_expander(file_path)
def generate_hash_key(path, counter):
    """Generate a unique hash key for a given file path and counter."""
    return hashlib.md5(f"{path}_{counter}".encode()).hexdigest()

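# Alternative flat (non-expander) per-file edit/save/delete UI; note that
# main() below does not currently call this function.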
def show_file_operations(file_path):
    # Initialize a per-file counter once and keep it stable: incrementing it
    # on every Streamlit rerun would change all widget keys below and lose
    # the edit state between interactions.
    counter_key = f"counter_{file_path}"
    if counter_key not in st.session_state:
        st.session_state[counter_key] = 1

    # Unique hash keys for each file and operation based on their path and counter
    counter = st.session_state[counter_key]
    edit_button_key = f"edit_button_{generate_hash_key(file_path, counter)}"
    save_button_key = f"save_button_{generate_hash_key(file_path, counter)}"
    delete_button_key = f"delete_button_{generate_hash_key(file_path, counter)}"
    content_key = f"content_{generate_hash_key(file_path, counter)}"
    # A button's own session-state key cannot be assigned to, so edit mode
    # is tracked under a separate key.
    edit_mode_key = f"edit_mode_{generate_hash_key(file_path, counter)}"

    # Start Edit operation
    if st.button(f"✏️ Edit {os.path.basename(file_path)}", key=edit_button_key):
        if edit_button_key not in st.session_state:
            with open(file_path, "r") as f:
                st.session_state[content_key] = f.read()
        st.session_state[edit_button_key] = True

    # Display text area for editing if in edit mode
    if st.session_state.get(edit_mode_key, False):
        # The text area takes its initial value from st.session_state[content_key];
        # passing value= as well would conflict with the session-state default.
        edited_content = st.text_area("Edit the file content:", height=250, key=content_key)

        # Save button
        if st.button(f"💾 Save {os.path.basename(file_path)}", key=save_button_key):
            with open(file_path, "w") as f:
                f.write(edited_content)
            new_file_size = os.path.getsize(file_path)
            download_link = get_download_link(file_path)
            st.markdown(f"✅ File **{os.path.basename(file_path)}** saved! ([{download_link}]) - New size: {new_file_size} bytes", unsafe_allow_html=True)
            st.session_state[edit_button_key] = False  # Exit edit mode

    # Delete button
    if st.button(f"🗑️ Delete {os.path.basename(file_path)}", key=delete_button_key):
        os.remove(file_path)
        st.markdown(f"🎉 File {os.path.basename(file_path)} deleted!")
        # Remove state variables related to the deleted file
        st.session_state.pop(edit_mode_key, None)
        st.session_state.pop(content_key, None)


def main():
    st.sidebar.title('Web Datasets Bulk Downloader')
    url = st.sidebar.text_input('Please enter a Web URL to bulk download text and files')

    # Load history
    with open("history.json", "r") as f:
        history = json.load(f)

    # Save the history of URL entered as a json file
    if url:
        subdir = hashlib.md5(url.encode()).hexdigest()
        if not os.path.exists(subdir):
            os.makedirs(subdir)
        if url not in history:
            history[url] = subdir
            with open("history.json", "w") as f:
                json.dump(history, f)

    if st.sidebar.button('📥 Get All the Content'):
        if url:
            download_html_and_files(url, history[url])
            show_download_links(history[url])
        else:
            st.sidebar.warning('Please enter a URL first.')

    if st.sidebar.button('📂 Show Download Links'):
        for subdir in history.values():
            show_download_links(subdir)

    # Display history as markdown
    with st.expander("URL History and Downloaded Files"):
        for past_url, subdir in history.items():  # avoid shadowing the sidebar url
            st.markdown(f"#### {past_url}")
            show_download_links(subdir)

if __name__ == "__main__":
    main()