dsmultimedika commited on
Commit
d57efd6
·
1 Parent(s): bb39d30

Improve the code bot development

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitignore +2 -1
  2. alembic.ini +117 -0
  3. alembic/README +1 -0
  4. alembic/env.py +86 -0
  5. alembic/script.py.mako +26 -0
  6. alembic/versions/404f8a028e0e_add_bot_name.py +30 -0
  7. alembic/versions/426e52aa13aa_migration_description.py +91 -0
  8. alembic/versions/4dd226fee84e_add_updated_data.py +30 -0
  9. alembic/versions/6bd7ce57dca0_add_updated_at.py +30 -0
  10. alembic/versions/6e972ae8b93b_add_updated_at_session.py +32 -0
  11. alembic/versions/82908d30ae5a_add_updated_data.py +50 -0
  12. alembic/versions/98b1b4a0de39_add_username.py +32 -0
  13. alembic/versions/b1df0377cbe2_change_time_zome.py +30 -0
  14. alembic/versions/c0e9d62caae8_change_time_zome.py +30 -0
  15. alembic/versions/c818e5b84075_add_role_id.py +108 -0
  16. alembic/versions/fcc0580a4769_add_updated_model_planning.py +48 -0
  17. api/auth.py +71 -0
  18. api/events.py +6 -3
  19. api/function.py +89 -64
  20. api/router/book.py +278 -0
  21. api/router/book_collection.py +188 -0
  22. api/router/bot_general.py +94 -0
  23. api/router/bot_one.py +151 -0
  24. api/router/bot_specific.py +342 -0
  25. api/router/category.py +158 -0
  26. api/router/role.py +98 -10
  27. api/router/trial.py +4 -5
  28. api/router/user.py +141 -8
  29. app.py +28 -6
  30. config.py +1 -0
  31. controller/__init__.py +0 -0
  32. controller/book_collection_controller.py +0 -0
  33. controller/book_controller.py +0 -0
  34. controller/bot_general_controller.py +0 -0
  35. controller/bot_one_controller.py +0 -0
  36. controller/bot_specific_controller.py +0 -0
  37. controller/category_controller.py +0 -0
  38. controller/user_controller.py +0 -0
  39. core/chat/chatstore.py +102 -34
  40. core/chat/engine.py +27 -39
  41. core/parser.py +10 -0
  42. core/prompt.py +9 -5
  43. core/summarization/summarizer.py +5 -5
  44. db/database.py +58 -0
  45. db/delete_data.py +1 -1
  46. db/get_data.py +8 -6
  47. db/models.py +164 -0
  48. db/save_data.py +13 -4
  49. db/update_data.py +8 -9
  50. helper/bot_function.py +0 -0
.gitignore CHANGED
@@ -397,4 +397,5 @@ FodyWeavers.xsd
397
  # JetBrains Rider
398
  *.sln.iml
399
 
400
- .env
 
 
397
  # JetBrains Rider
398
  *.sln.iml
399
 
400
+ .env
401
+ *.pem
alembic.ini ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ # Use forward slashes (/) also on windows to provide an os agnostic path
6
+ script_location = alembic
7
+
8
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9
+ # Uncomment the line below if you want the files to be prepended with date and time
10
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11
+ # for all available tokens
12
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13
+
14
+ # sys.path path, will be prepended to sys.path if present.
15
+ # defaults to the current working directory.
16
+ prepend_sys_path = .
17
+
18
+ # timezone to use when rendering the date within the migration file
19
+ # as well as the filename.
20
+ # If specified, requires the python>=3.9 or backports.zoneinfo library.
21
+ # Any required deps can installed by adding `alembic[tz]` to the pip requirements
22
+ # string value is passed to ZoneInfo()
23
+ # leave blank for localtime
24
+ # timezone =
25
+
26
+ # max length of characters to apply to the "slug" field
27
+ # truncate_slug_length = 40
28
+
29
+ # set to 'true' to run the environment during
30
+ # the 'revision' command, regardless of autogenerate
31
+ # revision_environment = false
32
+
33
+ # set to 'true' to allow .pyc and .pyo files without
34
+ # a source .py file to be detected as revisions in the
35
+ # versions/ directory
36
+ # sourceless = false
37
+
38
+ # version location specification; This defaults
39
+ # to alembic/versions. When using multiple version
40
+ # directories, initial revisions must be specified with --version-path.
41
+ # The path separator used here should be the separator specified by "version_path_separator" below.
42
+ # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
43
+
44
+ # version path separator; As mentioned above, this is the character used to split
45
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47
+ # Valid values for version_path_separator are:
48
+ #
49
+ # version_path_separator = :
50
+ # version_path_separator = ;
51
+ # version_path_separator = space
52
+ # version_path_separator = newline
53
+ version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
54
+
55
+ # set to 'true' to search source files recursively
56
+ # in each "version_locations" directory
57
+ # new in Alembic version 1.10
58
+ # recursive_version_locations = false
59
+
60
+ # the output encoding used when revision files
61
+ # are written from script.py.mako
62
+ # output_encoding = utf-8
63
+
64
+ sqlalchemy.url = placeholder_url
65
+
66
+
67
+ [post_write_hooks]
68
+ # post_write_hooks defines scripts or Python functions that are run
69
+ # on newly generated revision scripts. See the documentation for further
70
+ # content and examples
71
+
72
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
73
+ # hooks = black
74
+ # black.type = console_scripts
75
+ # black.entrypoint = black
76
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
77
+
78
+ # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
79
+ # hooks = ruff
80
+ # ruff.type = exec
81
+ # ruff.executable = %(here)s/.venv/bin/ruff
82
+ # ruff.options = --fix REVISION_SCRIPT_FILENAME
83
+
84
+ # Logging configuration
85
+ [loggers]
86
+ keys = root,sqlalchemy,alembic
87
+
88
+ [handlers]
89
+ keys = console
90
+
91
+ [formatters]
92
+ keys = generic
93
+
94
+ [logger_root]
95
+ level = WARN
96
+ handlers = console
97
+ qualname =
98
+
99
+ [logger_sqlalchemy]
100
+ level = WARN
101
+ handlers =
102
+ qualname = sqlalchemy.engine
103
+
104
+ [logger_alembic]
105
+ level = INFO
106
+ handlers =
107
+ qualname = alembic
108
+
109
+ [handler_console]
110
+ class = StreamHandler
111
+ args = (sys.stderr,)
112
+ level = NOTSET
113
+ formatter = generic
114
+
115
+ [formatter_generic]
116
+ format = %(levelname)-5.5s [%(name)s] %(message)s
117
+ datefmt = %H:%M:%S
alembic/README ADDED
@@ -0,0 +1 @@
 
 
1
+ Generic single-database configuration.
alembic/env.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from logging.config import fileConfig
2
+
3
+ from sqlalchemy import engine_from_config
4
+ from sqlalchemy import pool
5
+ from dotenv import load_dotenv
6
+ from alembic import context
7
+ from db.models import Base
8
+
9
+ import os
10
+
11
+ load_dotenv()
12
+
13
+ # this is the Alembic Config object, which provides
14
+ # access to the values within the .ini file in use.
15
+ config = context.config
16
+
17
+ # Interpret the config file for Python logging.
18
+ # This line sets up loggers basically.
19
+ if config.config_file_name is not None:
20
+ fileConfig(config.config_file_name)
21
+
22
+ config.set_main_option(
23
+ "sqlalchemy.url", os.getenv("DB_URI_SQL_ALCHEMY")
24
+ )
25
+ # add your model's MetaData object here
26
+ # for 'autogenerate' support
27
+ # from myapp import mymodel
28
+ # target_metadata = mymodel.Base.metadata
29
+ target_metadata = Base.metadata
30
+
31
+ # other values from the config, defined by the needs of env.py,
32
+ # can be acquired:
33
+ # my_important_option = config.get_main_option("my_important_option")
34
+ # ... etc.
35
+
36
+
37
+ def run_migrations_offline() -> None:
38
+ """Run migrations in 'offline' mode.
39
+
40
+ This configures the context with just a URL
41
+ and not an Engine, though an Engine is acceptable
42
+ here as well. By skipping the Engine creation
43
+ we don't even need a DBAPI to be available.
44
+
45
+ Calls to context.execute() here emit the given string to the
46
+ script output.
47
+
48
+ """
49
+ url = config.get_main_option("sqlalchemy.url")
50
+ context.configure(
51
+ url=url,
52
+ target_metadata=target_metadata,
53
+ literal_binds=True,
54
+ dialect_opts={"paramstyle": "named"},
55
+ )
56
+
57
+ with context.begin_transaction():
58
+ context.run_migrations()
59
+
60
+
61
+ def run_migrations_online() -> None:
62
+ """Run migrations in 'online' mode.
63
+
64
+ In this scenario we need to create an Engine
65
+ and associate a connection with the context.
66
+
67
+ """
68
+ connectable = engine_from_config(
69
+ config.get_section(config.config_ini_section, {}),
70
+ prefix="sqlalchemy.",
71
+ poolclass=pool.NullPool,
72
+ )
73
+
74
+ with connectable.connect() as connection:
75
+ context.configure(
76
+ connection=connection, target_metadata=target_metadata
77
+ )
78
+
79
+ with context.begin_transaction():
80
+ context.run_migrations()
81
+
82
+
83
+ if context.is_offline_mode():
84
+ run_migrations_offline()
85
+ else:
86
+ run_migrations_online()
alembic/script.py.mako ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ ${imports if imports else ""}
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = ${repr(up_revision)}
16
+ down_revision: Union[str, None] = ${repr(down_revision)}
17
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
+
20
+
21
+ def upgrade() -> None:
22
+ ${upgrades if upgrades else "pass"}
23
+
24
+
25
+ def downgrade() -> None:
26
+ ${downgrades if downgrades else "pass"}
alembic/versions/404f8a028e0e_add_bot_name.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add bot_name
2
+
3
+ Revision ID: 404f8a028e0e
4
+ Revises: 98b1b4a0de39
5
+ Create Date: 2024-10-04 14:03:44.098762
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '404f8a028e0e'
16
+ down_revision: Union[str, None] = '98b1b4a0de39'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('bot', sa.Column('bot_name', sa.String(length=200), nullable=False))
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ op.drop_column('bot', 'bot_name')
30
+ # ### end Alembic commands ###
alembic/versions/426e52aa13aa_migration_description.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """migration description
2
+
3
+ Revision ID: 426e52aa13aa
4
+ Revises:
5
+ Create Date: 2024-10-02 14:32:47.859996
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ from sqlalchemy.dialects import mysql
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '426e52aa13aa'
16
+ down_revision: Union[str, None] = None
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.drop_table('Category')
24
+ op.drop_table('Role')
25
+ op.drop_table('Metadata')
26
+ op.drop_table('_prisma_migrations')
27
+ op.drop_index('email', table_name='User')
28
+ op.drop_table('User')
29
+ op.add_column('bot', sa.Column('user_id', sa.Integer(), nullable=True))
30
+ op.add_column('metadata', sa.Column('title', sa.String(length=100), nullable=True))
31
+ op.create_foreign_key(None, 'bot', 'user', ['user_id'], ['id'])
32
+ op.alter_column('session', 'id',
33
+ existing_type=mysql.CHAR(length=36),
34
+ type_=sa.String(length=36),
35
+ existing_nullable=False)
36
+ # ### end Alembic commands ###
37
+
38
+
39
+ def downgrade() -> None:
40
+ # ### commands auto generated by Alembic - please adjust! ###
41
+ op.alter_column('session', 'id',
42
+ existing_type=sa.String(length=36),
43
+ type_=mysql.CHAR(length=36),
44
+ existing_nullable=False)
45
+ op.drop_constraint(None, 'bot', type_='foreignkey')
46
+ op.drop_column('bot', 'user_id')
47
+ op.create_table('User',
48
+ sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
49
+ sa.Column('name', mysql.VARCHAR(length=50), nullable=False),
50
+ sa.Column('email', mysql.VARCHAR(length=100), nullable=False),
51
+ sa.Column('password_hash', mysql.VARCHAR(length=100), nullable=False),
52
+ sa.Column('created_at', mysql.DATETIME(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
53
+ sa.Column('updated_at', mysql.DATETIME(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
54
+ sa.PrimaryKeyConstraint('id')
55
+ )
56
+ op.create_index('email', 'User', ['email'], unique=True)
57
+ op.create_table('_prisma_migrations',
58
+ sa.Column('id', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=36), nullable=False),
59
+ sa.Column('checksum', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=64), nullable=False),
60
+ sa.Column('finished_at', mysql.DATETIME(fsp=3), nullable=True),
61
+ sa.Column('migration_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=255), nullable=False),
62
+ sa.Column('logs', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=True),
63
+ sa.Column('rolled_back_at', mysql.DATETIME(fsp=3), nullable=True),
64
+ sa.Column('started_at', mysql.DATETIME(fsp=3), server_default=sa.text('CURRENT_TIMESTAMP(3)'), nullable=False),
65
+ sa.Column('applied_steps_count', mysql.INTEGER(unsigned=True), server_default=sa.text("'0'"), autoincrement=False, nullable=False),
66
+ sa.PrimaryKeyConstraint('id')
67
+ )
68
+ op.create_table('Metadata',
69
+ sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
70
+ sa.Column('title', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False),
71
+ sa.Column('category', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False),
72
+ sa.Column('author', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False),
73
+ sa.Column('year', mysql.INTEGER(), autoincrement=False, nullable=False),
74
+ sa.Column('publisher', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False),
75
+ sa.Column('createdAt', mysql.DATETIME(fsp=3), server_default=sa.text('CURRENT_TIMESTAMP(3)'), nullable=False),
76
+ sa.Column('updatedAt', mysql.DATETIME(fsp=3), nullable=False),
77
+ sa.PrimaryKeyConstraint('id')
78
+ )
79
+ op.create_table('Role',
80
+ sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
81
+ sa.Column('role_name', mysql.VARCHAR(length=100), nullable=False),
82
+ sa.Column('description', mysql.VARCHAR(length=100), nullable=True),
83
+ sa.PrimaryKeyConstraint('id')
84
+ )
85
+ op.create_table('Category',
86
+ sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
87
+ sa.Column('category', mysql.VARCHAR(length=100), nullable=True),
88
+ sa.Column('created_at', mysql.DATETIME(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
89
+ sa.PrimaryKeyConstraint('id')
90
+ )
91
+ # ### end Alembic commands ###
alembic/versions/4dd226fee84e_add_updated_data.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add updated data
2
+
3
+ Revision ID: 4dd226fee84e
4
+ Revises: 6bd7ce57dca0
5
+ Create Date: 2024-10-07 14:44:03.718963
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '4dd226fee84e'
16
+ down_revision: Union[str, None] = '6bd7ce57dca0'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('category', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ op.drop_column('category', 'updated_at')
30
+ # ### end Alembic commands ###
alembic/versions/6bd7ce57dca0_add_updated_at.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add updated at
2
+
3
+ Revision ID: 6bd7ce57dca0
4
+ Revises: 404f8a028e0e
5
+ Create Date: 2024-10-07 14:02:25.722428
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '6bd7ce57dca0'
16
+ down_revision: Union[str, None] = '404f8a028e0e'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('category', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ op.drop_column('category', 'updated_at')
30
+ # ### end Alembic commands ###
alembic/versions/6e972ae8b93b_add_updated_at_session.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add updated at session
2
+
3
+ Revision ID: 6e972ae8b93b
4
+ Revises: 82908d30ae5a
5
+ Create Date: 2024-10-10 14:10:14.433336
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '6e972ae8b93b'
16
+ down_revision: Union[str, None] = '82908d30ae5a'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('session', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
24
+ op.add_column('session_publisher', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
25
+ # ### end Alembic commands ###
26
+
27
+
28
+ def downgrade() -> None:
29
+ # ### commands auto generated by Alembic - please adjust! ###
30
+ op.drop_column('session_publisher', 'updated_at')
31
+ op.drop_column('session', 'updated_at')
32
+ # ### end Alembic commands ###
alembic/versions/82908d30ae5a_add_updated_data.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add updated data
2
+
3
+ Revision ID: 82908d30ae5a
4
+ Revises: fcc0580a4769
5
+ Create Date: 2024-10-09 14:12:01.147393
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ from sqlalchemy.dialects import mysql
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '82908d30ae5a'
16
+ down_revision: Union[str, None] = 'fcc0580a4769'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.create_table('session_publisher',
24
+ sa.Column('id', sa.String(length=36), nullable=False),
25
+ sa.Column('user_id', sa.Integer(), nullable=True),
26
+ sa.Column('metadata_id', sa.Integer(), nullable=True),
27
+ sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
28
+ sa.ForeignKeyConstraint(['metadata_id'], ['metadata.id'], ),
29
+ sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
30
+ sa.PrimaryKeyConstraint('id')
31
+ )
32
+ op.create_index(op.f('ix_session_publisher_id'), 'session_publisher', ['id'], unique=False)
33
+ op.add_column('metadata', sa.Column('thumbnail', sa.LargeBinary(), nullable=True))
34
+ op.add_column('planning', sa.Column('trials_id', sa.Integer(), nullable=True))
35
+ op.create_foreign_key(None, 'planning', 'trials', ['trials_id'], ['id'])
36
+ op.drop_column('planning', 'token_planned')
37
+ op.drop_column('planning', 'token_used')
38
+ # ### end Alembic commands ###
39
+
40
+
41
+ def downgrade() -> None:
42
+ # ### commands auto generated by Alembic - please adjust! ###
43
+ op.add_column('planning', sa.Column('token_used', mysql.INTEGER(), autoincrement=False, nullable=False))
44
+ op.add_column('planning', sa.Column('token_planned', mysql.INTEGER(), autoincrement=False, nullable=False))
45
+ op.drop_constraint(None, 'planning', type_='foreignkey')
46
+ op.drop_column('planning', 'trials_id')
47
+ op.drop_column('metadata', 'thumbnail')
48
+ op.drop_index(op.f('ix_session_publisher_id'), table_name='session_publisher')
49
+ op.drop_table('session_publisher')
50
+ # ### end Alembic commands ###
alembic/versions/98b1b4a0de39_add_username.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add username
2
+
3
+ Revision ID: 98b1b4a0de39
4
+ Revises: c818e5b84075
5
+ Create Date: 2024-10-03 14:27:43.877725
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '98b1b4a0de39'
16
+ down_revision: Union[str, None] = 'c818e5b84075'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('user', sa.Column('username', sa.String(length=100), nullable=False))
24
+ op.create_unique_constraint(None, 'user', ['username'])
25
+ # ### end Alembic commands ###
26
+
27
+
28
+ def downgrade() -> None:
29
+ # ### commands auto generated by Alembic - please adjust! ###
30
+ op.drop_constraint(None, 'user', type_='unique')
31
+ op.drop_column('user', 'username')
32
+ # ### end Alembic commands ###
alembic/versions/b1df0377cbe2_change_time_zome.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """change time zome
2
+
3
+ Revision ID: b1df0377cbe2
4
+ Revises: c0e9d62caae8
5
+ Create Date: 2024-10-10 14:44:44.626184
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = 'b1df0377cbe2'
16
+ down_revision: Union[str, None] = 'c0e9d62caae8'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ pass
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ pass
30
+ # ### end Alembic commands ###
alembic/versions/c0e9d62caae8_change_time_zome.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """change time zome
2
+
3
+ Revision ID: c0e9d62caae8
4
+ Revises: 6e972ae8b93b
5
+ Create Date: 2024-10-10 14:37:41.728523
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = 'c0e9d62caae8'
16
+ down_revision: Union[str, None] = '6e972ae8b93b'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ pass
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ pass
30
+ # ### end Alembic commands ###
alembic/versions/c818e5b84075_add_role_id.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add role id
2
+
3
+ Revision ID: c818e5b84075
4
+ Revises: 426e52aa13aa
5
+ Create Date: 2024-10-03 09:35:53.882054
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ from sqlalchemy.dialects import mysql
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = 'c818e5b84075'
16
+ down_revision: Union[str, None] = '426e52aa13aa'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.drop_table('_prisma_migrations')
24
+ op.alter_column('category', 'category',
25
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
26
+ type_=sa.String(length=200),
27
+ existing_nullable=True)
28
+ op.alter_column('feedback', 'comment',
29
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
30
+ type_=sa.String(length=1000),
31
+ existing_nullable=True)
32
+ op.alter_column('message', 'goal',
33
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
34
+ type_=sa.String(length=200),
35
+ existing_nullable=True)
36
+ op.alter_column('metadata', 'title',
37
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
38
+ type_=sa.String(length=200),
39
+ existing_nullable=True)
40
+ op.alter_column('metadata', 'author',
41
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
42
+ type_=sa.String(length=200),
43
+ existing_nullable=True)
44
+ op.alter_column('role', 'role_name',
45
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
46
+ type_=sa.String(length=200),
47
+ existing_nullable=False)
48
+ op.alter_column('role', 'description',
49
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
50
+ type_=sa.String(length=200),
51
+ existing_nullable=True)
52
+ op.add_column('user', sa.Column('role_id', sa.Integer(), nullable=True))
53
+ op.alter_column('user', 'name',
54
+ existing_type=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=50),
55
+ type_=sa.String(length=100),
56
+ existing_nullable=False)
57
+ op.create_foreign_key(None, 'user', 'role', ['role_id'], ['id'])
58
+ # ### end Alembic commands ###
59
+
60
+
61
+ def downgrade() -> None:
62
+ # ### commands auto generated by Alembic - please adjust! ###
63
+ op.drop_constraint(None, 'user', type_='foreignkey')
64
+ op.alter_column('user', 'name',
65
+ existing_type=sa.String(length=100),
66
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=50),
67
+ existing_nullable=False)
68
+ op.drop_column('user', 'role_id')
69
+ op.alter_column('role', 'description',
70
+ existing_type=sa.String(length=200),
71
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
72
+ existing_nullable=True)
73
+ op.alter_column('role', 'role_name',
74
+ existing_type=sa.String(length=200),
75
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
76
+ existing_nullable=False)
77
+ op.alter_column('metadata', 'author',
78
+ existing_type=sa.String(length=200),
79
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
80
+ existing_nullable=True)
81
+ op.alter_column('metadata', 'title',
82
+ existing_type=sa.String(length=200),
83
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
84
+ existing_nullable=True)
85
+ op.alter_column('message', 'goal',
86
+ existing_type=sa.String(length=200),
87
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
88
+ existing_nullable=True)
89
+ op.alter_column('feedback', 'comment',
90
+ existing_type=sa.String(length=1000),
91
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
92
+ existing_nullable=True)
93
+ op.alter_column('category', 'category',
94
+ existing_type=sa.String(length=200),
95
+ type_=mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=100),
96
+ existing_nullable=True)
97
+ op.create_table('_prisma_migrations',
98
+ sa.Column('id', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=36), nullable=False),
99
+ sa.Column('checksum', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=64), nullable=False),
100
+ sa.Column('finished_at', mysql.DATETIME(fsp=3), nullable=True),
101
+ sa.Column('migration_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=255), nullable=False),
102
+ sa.Column('logs', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=True),
103
+ sa.Column('rolled_back_at', mysql.DATETIME(fsp=3), nullable=True),
104
+ sa.Column('started_at', mysql.DATETIME(fsp=3), server_default=sa.text('CURRENT_TIMESTAMP(3)'), nullable=False),
105
+ sa.Column('applied_steps_count', mysql.INTEGER(unsigned=True), server_default=sa.text("'0'"), autoincrement=False, nullable=False),
106
+ sa.PrimaryKeyConstraint('id')
107
+ )
108
+ # ### end Alembic commands ###
alembic/versions/fcc0580a4769_add_updated_model_planning.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add updated model planning
2
+
3
+ Revision ID: fcc0580a4769
4
+ Revises: 4dd226fee84e
5
+ Create Date: 2024-10-08 10:01:41.964473
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = 'fcc0580a4769'
16
+ down_revision: Union[str, None] = '4dd226fee84e'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # ### commands auto generated by Alembic - please adjust! ###
23
+ op.add_column('bot_meta', sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
24
+ op.add_column('bot_meta', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
25
+ op.add_column('planning', sa.Column('planning_name', sa.String(length=200), nullable=False))
26
+ op.add_column('planning', sa.Column('duration', sa.Integer(), nullable=False))
27
+ op.add_column('planning', sa.Column('token_used', sa.Integer(), nullable=False))
28
+ op.add_column('planning', sa.Column('token_planned', sa.Integer(), nullable=False))
29
+ op.add_column('planning', sa.Column('start_date', sa.DateTime(), nullable=False))
30
+ op.add_column('planning', sa.Column('end_date', sa.DateTime(), nullable=False))
31
+ op.add_column('planning', sa.Column('is_activated', sa.Boolean(), nullable=True))
32
+ op.add_column('planning', sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
33
+ # ### end Alembic commands ###
34
+
35
+
36
+ def downgrade() -> None:
37
+ # ### commands auto generated by Alembic - please adjust! ###
38
+ op.drop_column('planning', 'created_at')
39
+ op.drop_column('planning', 'is_activated')
40
+ op.drop_column('planning', 'end_date')
41
+ op.drop_column('planning', 'start_date')
42
+ op.drop_column('planning', 'token_planned')
43
+ op.drop_column('planning', 'token_used')
44
+ op.drop_column('planning', 'duration')
45
+ op.drop_column('planning', 'planning_name')
46
+ op.drop_column('bot_meta', 'updated_at')
47
+ op.drop_column('bot_meta', 'created_at')
48
+ # ### end Alembic commands ###
api/auth.py CHANGED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Depends
2
+ from fastapi.responses import JSONResponse
3
+ from fastapi.security import OAuth2PasswordBearer
4
+ from dotenv import load_dotenv
5
+
6
+ from sqlalchemy.orm import Session
7
+ from db.models import User
8
+ from starlette import status
9
+ from datetime import timedelta, datetime, timezone
10
+ from db.database import get_db
11
+ from passlib.context import CryptContext
12
+ from typing import Annotated
13
+ from jose import jwt, JWTError
14
+ import os
15
+
16
+ load_dotenv()
17
+
18
+ oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login")
19
+
20
+ # Custom OAuth2 request form to accept email, username, password, and role_id
21
+ router = APIRouter(prefix="/auth", tags=["auth"])
22
+
23
+ SECRET_KEY = os.getenv("SECRET_KEY")
24
+ ALGORITHM = "HS256"
25
+
26
+ bcrypt_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
27
+
28
+ # Database dependency
29
+ db_dependency = Annotated[Session, Depends(get_db)]
30
+
31
+
32
def authenticate_user(email: str, password: str, db):
    """Look up a user by email and verify the supplied password.

    Returns the ``User`` row on success, ``False`` when the email is
    unknown or the password does not match the stored bcrypt hash.
    """
    account = db.query(User).filter(User.email == email).first()
    if account is not None and bcrypt_context.verify(password, account.hashed_password):
        return account
    return False
40
+
41
+
42
def create_access_token(
    username: str, name: str, user_id: int, role_id: int, expires_delta: timedelta, email: str
):
    """Build a signed JWT carrying the user's identity claims.

    The token expires ``expires_delta`` from now (UTC).
    """
    expiry = datetime.now(timezone.utc) + expires_delta
    claims = {
        "sub": username,
        "name": name,
        "id": user_id,
        "role_id": role_id,
        "email": email,
        "exp": expiry,
    }
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
49
+
50
+
51
async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]):
    """Decode the bearer token and return the caller's identity claims.

    Returns ``None`` when the token fails validation or is missing the
    required ``sub``/``id`` claims, so route handlers can reject the
    request with their existing ``if user is None`` checks.

    Fix: the previous version returned a ``JSONResponse`` on failure.
    A dependency's return value is injected as-is, and a ``JSONResponse``
    is truthy and not ``None`` — so every route's ``if user is None``
    auth check was silently bypassed on invalid tokens.
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        return None

    username: str = payload.get("sub")
    name: str = payload.get("name")
    user_id: int = payload.get("id")
    role_id: int = payload.get("role_id")
    email: str = payload.get("email")

    # Both identifying claims must be present for the token to be usable.
    if username is None or user_id is None:
        return None

    return {"username": username, "name": name, "id": user_id, "role_id": role_id, "email": email}
api/events.py CHANGED
@@ -1,14 +1,18 @@
1
  from fastapi import FastAPI
2
- from api.router.topic import db_conn
 
 
3
  from llama_index.core import set_global_handler
4
- import os
5
  from dotenv import load_dotenv
 
6
 
7
 
8
  load_dotenv()
9
 
10
 
11
  async def startup() -> None:
 
 
12
  await db_conn.connect()
13
  os.environ["LANGFUSE_SECRET_KEY"] = os.getenv("LANGFUSE_SECRET_KEY")
14
  os.environ["LANGFUSE_PUBLIC_KEY"] = os.getenv("LANGFUSE_PUBLIC_KEY")
@@ -19,7 +23,6 @@ async def startup() -> None:
19
  async def shutdown() -> None:
20
  await db_conn.disconnect()
21
 
22
-
23
  def register_events(app: FastAPI) -> FastAPI:
24
  app.add_event_handler("startup", startup)
25
  app.add_event_handler("shutdown", shutdown)
 
1
  from fastapi import FastAPI
2
+ from db.models import Base
3
+ from db.database import engine
4
+ from api.router.book import db_conn
5
  from llama_index.core import set_global_handler
 
6
  from dotenv import load_dotenv
7
+ import os
8
 
9
 
10
  load_dotenv()
11
 
12
 
13
  async def startup() -> None:
14
+ Base.metadata.create_all(engine)
15
+ print("table added")
16
  await db_conn.connect()
17
  os.environ["LANGFUSE_SECRET_KEY"] = os.getenv("LANGFUSE_SECRET_KEY")
18
  os.environ["LANGFUSE_PUBLIC_KEY"] = os.getenv("LANGFUSE_PUBLIC_KEY")
 
23
  async def shutdown() -> None:
24
  await db_conn.disconnect()
25
 
 
26
  def register_events(app: FastAPI) -> FastAPI:
27
  app.add_event_handler("startup", startup)
28
  app.add_event_handler("shutdown", shutdown)
api/function.py CHANGED
@@ -1,75 +1,69 @@
1
  from script.vector_db import IndexManager
2
  from script.document_uploader import Uploader
3
- from db.save_data import InsertDatabase
4
  from db.get_data import GetDatabase
5
  from db.delete_data import DeleteDatabase
6
  from db.update_data import UpdateDatabase
7
 
8
- from typing import Any, Optional, List
9
  from fastapi import UploadFile
10
  from fastapi import HTTPException
 
11
 
12
- from service.dto import ChatMessage
13
  from core.chat.engine import Engine
14
  from core.chat.chatstore import ChatStore
15
- from core.parser import clean_text, update_response, renumber_sources, seperate_to_list
16
- from llama_index.core.llms import MessageRole
17
- from service.dto import BotResponseStreaming
18
  from service.aws_loader import Loader
19
 
 
 
 
 
 
 
20
  import logging
21
  import re
22
  import json
 
23
 
 
24
 
25
  # Configure logging
26
  logging.basicConfig(level=logging.INFO)
27
 
28
- # async def data_ingestion(
29
- # db_conn, reference, file: UploadFile, content_table: UploadFile
30
- # ) -> Any:
31
 
32
-
33
- async def data_ingestion(db_conn, reference, file: UploadFile) -> Any:
34
  try:
35
-
36
- # insert_database = InsertDatabase(db_conn)
37
-
38
  file_name = f"{reference['title']}"
39
  aws_loader = Loader()
40
 
41
  file_obj = file
42
  aws_loader.upload_to_s3(file_obj, file_name)
43
 
44
- print("Uploaded Success")
45
-
46
- response = json.dumps({"status": "success", "message": "Vector Index loaded successfully."})
47
-
48
- # Insert data into the database
49
- # await insert_database.insert_data(reference)
50
-
51
- # # uploader = Uploader(reference, file, content_table)
52
- # uploader = Uploader(reference, file)
53
- # print("uploader : ", uploader)
54
 
55
- # nodes_with_metadata = await uploader.process_documents()
56
 
57
- # # Build indexes using IndexManager
58
- # index = IndexManager()
59
- # response = index.build_indexes(nodes_with_metadata)
60
 
61
- return response
 
 
62
 
63
  except Exception as e:
64
  # Log the error and raise HTTPException for FastAPI
65
- logging.error(f"An error occurred in data ingestion: {e}")
66
- raise HTTPException(
67
  status_code=500,
68
- detail="An internal server error occurred in data ingestion.",
69
  )
70
 
71
 
72
- async def get_data(db_conn, title="", fetch_all_data=True):
73
  get_database = GetDatabase(db_conn)
74
  print(get_database)
75
  try:
@@ -85,9 +79,9 @@ async def get_data(db_conn, title="", fetch_all_data=True):
85
 
86
  except Exception as e:
87
  # Log the error and raise HTTPException for FastAPI
88
- logging.error(f"An error occurred in get data.: {e}")
89
- raise HTTPException(
90
- status_code=500, detail="An internal server error occurred in get data."
91
  )
92
 
93
 
@@ -103,9 +97,9 @@ async def update_data(id: int, reference, db_conn):
103
  return response
104
  except Exception as e:
105
  # Log the error and raise HTTPException for FastAPI
106
- logging.error(f"An error occurred in update data.: {e}")
107
- raise HTTPException(
108
- status_code=500, detail="An internal server error occurred in update data."
109
  )
110
 
111
 
@@ -118,43 +112,48 @@ async def delete_data(id: int, db_conn):
118
  return response
119
  except Exception as e:
120
  # Log the error and raise HTTPException for FastAPI
121
- logging.error(f"An error occurred in get data.: {e}")
122
- raise HTTPException(
123
- status_code=500, detail="An internal server error occurred in delete data."
124
  )
125
 
126
 
127
  def generate_completion_non_streaming(
128
- session_id, user_request, chat_engine, title=None, category=None, type="general"
129
  ):
 
 
 
 
 
 
 
 
 
 
 
 
130
  try:
131
- engine = Engine()
132
- index_manager = IndexManager()
133
- chatstore = ChatStore()
134
 
135
  # Load existing indexes
136
  index = index_manager.load_existing_indexes()
137
 
138
- if type == "general":
139
  # Retrieve the chat engine with the loaded index
140
  chat_engine = engine.get_chat_engine(session_id, index)
141
  else:
142
  # Retrieve the chat engine with the loaded index
143
- chat_engine = engine.get_chat_engine(
144
- session_id, index, title=title, category=category
145
- )
146
 
147
  # Generate completion response
148
  response = chat_engine.chat(user_request)
149
 
150
  sources = response.sources
151
- print(sources)
152
 
153
  number_reference = list(set(re.findall(r"\[(\d+)\]", str(response))))
154
  number_reference_sorted = sorted(number_reference)
155
 
156
  contents = []
157
- raw_contents = []
158
  metadata_collection = []
159
  scores = []
160
 
@@ -170,9 +169,6 @@ def generate_completion_non_streaming(
170
  # Pastikan number valid sebagai indeks
171
  if 0 <= number - 1 < len(node):
172
 
173
- raw_content = seperate_to_list(node[number - 1].node.get_text())
174
- raw_contents.append(raw_content)
175
-
176
  content = clean_text(node[number - 1].node.get_text())
177
  contents.append(content)
178
 
@@ -206,18 +202,47 @@ def generate_completion_non_streaming(
206
  chatstore.delete_last_message(session_id)
207
  chatstore.add_message(session_id, message)
208
  chatstore.clean_message(session_id)
209
-
210
- return str(response), raw_contents, contents, metadata_collection, scores
211
  except Exception as e:
212
  # Log the error and raise HTTPException for FastAPI
213
- logging.error(f"An error occurred in generate text: {e}")
214
- raise HTTPException(
215
  status_code=500,
216
- detail="An internal server error occurred in generate text.",
217
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
 
 
 
 
 
 
 
 
219
 
220
- async def generate_streaming_completion(user_request, chat_engine):
221
  try:
222
  engine = Engine()
223
  index_manager = IndexManager()
@@ -226,7 +251,7 @@ async def generate_streaming_completion(user_request, chat_engine):
226
  index = index_manager.load_existing_indexes()
227
 
228
  # Retrieve the chat engine with the loaded index
229
- chat_engine = engine.get_chat_engine(index)
230
  # Generate completion response
231
  response = chat_engine.stream_chat(user_request)
232
 
@@ -258,4 +283,4 @@ async def generate_streaming_completion(user_request, chat_engine):
258
  raise HTTPException(
259
  status_code=500,
260
  detail="An internal server error occurred in generate text.",
261
- )
 
1
  from script.vector_db import IndexManager
2
  from script.document_uploader import Uploader
 
3
  from db.get_data import GetDatabase
4
  from db.delete_data import DeleteDatabase
5
  from db.update_data import UpdateDatabase
6
 
7
+ from typing import Any
8
  from fastapi import UploadFile
9
  from fastapi import HTTPException
10
+ from fastapi.responses import JSONResponse
11
 
12
+ from llama_index.core.llms import MessageRole
13
  from core.chat.engine import Engine
14
  from core.chat.chatstore import ChatStore
15
+ from core.parser import clean_text, update_response, renumber_sources
16
+ from service.dto import BotResponseStreaming, ChatMessage
 
17
  from service.aws_loader import Loader
18
 
19
+ from pymongo.mongo_client import MongoClient
20
+ from dotenv import load_dotenv
21
+
22
+ from typing import List
23
+ from datetime import datetime
24
+ import redis
25
  import logging
26
  import re
27
  import json
28
+ import os
29
 
30
+ load_dotenv()
31
 
32
  # Configure logging
33
  logging.basicConfig(level=logging.INFO)
34
 
 
 
 
35
 
36
async def data_ingestion(category_id, reference, file: UploadFile) -> Any:
    """Upload the book file to S3, parse it into nodes, and index the vectors.

    Returns a JSON string on success, or a 500 ``JSONResponse`` on failure.
    (``category_id`` is currently unused here; it is part of the caller's
    contract.)
    """
    try:
        # Push the raw file to S3 under the book's title.
        file_name = f"{reference['title']}"
        s3_loader = Loader()
        s3_loader.upload_to_s3(file, file_name)

        # Parse the document into metadata-tagged nodes.
        uploader = Uploader(reference, file)
        parsed_nodes = await uploader.process_documents()

        # Build indexes using IndexManager.
        IndexManager().build_indexes(parsed_nodes)

        return json.dumps(
            {"status": "success", "message": "Vector Index loaded successfully."}
        )

    except Exception as exc:
        # Log the error and hand a generic 500 back to the caller.
        logging.error("An error occurred in data ingestion: %s", exc)
        return JSONResponse(
            status_code=500,
            content="An internal server error occurred in data ingestion.",
        )
 
65
 
66
+ async def get_data(db_conn, title=None, fetch_all_data=True):
67
  get_database = GetDatabase(db_conn)
68
  print(get_database)
69
  try:
 
79
 
80
  except Exception as e:
81
  # Log the error and raise HTTPException for FastAPI
82
+ logging.error("An error occurred in get data: %s", e)
83
+ return JSONResponse(
84
+ status_code=500, content="An internal server error occurred in get data."
85
  )
86
 
87
 
 
97
  return response
98
  except Exception as e:
99
  # Log the error and raise HTTPException for FastAPI
100
+ logging.error("An error occurred in update data: %s", e)
101
+ return JSONResponse(
102
+ status_code=500, content="An internal server error occurred in update data."
103
  )
104
 
105
 
 
112
  return response
113
  except Exception as e:
114
  # Log the error and raise HTTPException for FastAPI
115
+ logging.error("An error occurred in get data: %s", e)
116
+ return JSONResponse(
117
+ status_code=500, content="An internal server error occurred in delete data."
118
  )
119
 
120
 
121
  def generate_completion_non_streaming(
122
+ session_id, user_request, titles: List = None, type_bot="general"
123
  ):
124
+ uri = os.getenv("MONGO_URI")
125
+ engine = Engine()
126
+ index_manager = IndexManager()
127
+ chatstore = ChatStore()
128
+ client = MongoClient(uri)
129
+
130
+ try:
131
+ client.admin.command("ping")
132
+ print("Pinged your deployment. You successfully connected to MongoDB!")
133
+ except Exception as e:
134
+ return JSONResponse(status_code=500, content=f"Database Error as {e}")
135
+
136
  try:
 
 
 
137
 
138
  # Load existing indexes
139
  index = index_manager.load_existing_indexes()
140
 
141
+ if type_bot == "general":
142
  # Retrieve the chat engine with the loaded index
143
  chat_engine = engine.get_chat_engine(session_id, index)
144
  else:
145
  # Retrieve the chat engine with the loaded index
146
+ chat_engine = engine.get_chat_engine(session_id, index, titles, type_bot)
 
 
147
 
148
  # Generate completion response
149
  response = chat_engine.chat(user_request)
150
 
151
  sources = response.sources
 
152
 
153
  number_reference = list(set(re.findall(r"\[(\d+)\]", str(response))))
154
  number_reference_sorted = sorted(number_reference)
155
 
156
  contents = []
 
157
  metadata_collection = []
158
  scores = []
159
 
 
169
  # Pastikan number valid sebagai indeks
170
  if 0 <= number - 1 < len(node):
171
 
 
 
 
172
  content = clean_text(node[number - 1].node.get_text())
173
  contents.append(content)
174
 
 
202
  chatstore.delete_last_message(session_id)
203
  chatstore.add_message(session_id, message)
204
  chatstore.clean_message(session_id)
 
 
205
  except Exception as e:
206
  # Log the error and raise HTTPException for FastAPI
207
+ logging.error("An error occurred in generate text: %s", e)
208
+ return JSONResponse(
209
  status_code=500,
210
+ content=f"An internal server error occurred in generate text as {e}.")
211
+
212
+ try :
213
+ chat_history_db = [
214
+ ChatMessage(role=MessageRole.SYSTEM,
215
+ content=user_request,
216
+ timestamp=datetime.now(),
217
+ payment = "free" if type_bot=="general" else None
218
+ ),
219
+ ChatMessage(
220
+ role=MessageRole.ASSISTANT,
221
+ content=response,
222
+ metadata=metadata_collection,
223
+ timestamp=datetime.now(),
224
+ payment = "free" if type_bot=="general" else None
225
+ )
226
+ ]
227
+
228
+ chat_history_json = [message.model_dump() for message in chat_history_db]
229
+
230
+ db = client["bot_database"] # Replace with your database name
231
+ collection = db[session_id] # Replace with your collection name
232
+
233
+ result = collection.insert_many(chat_history_json)
234
+ print("Data inserted with record ids", result.inserted_ids)
235
 
236
+ return str(response), metadata_collection, scores
237
+
238
+ except Exception as e:
239
+ # Log the error and raise HTTPException for FastAPI
240
+ logging.error("An error occurred in generate text: %s", e)
241
+ return JSONResponse(
242
+ status_code=500,
243
+ content=f"An internal server error occurred in generate text as {e}.")
244
 
245
+ async def generate_streaming_completion(user_request, session_id):
246
  try:
247
  engine = Engine()
248
  index_manager = IndexManager()
 
251
  index = index_manager.load_existing_indexes()
252
 
253
  # Retrieve the chat engine with the loaded index
254
+ chat_engine = engine.get_chat_engine(index, session_id)
255
  # Generate completion response
256
  response = chat_engine.stream_chat(user_request)
257
 
 
283
  raise HTTPException(
284
  status_code=500,
285
  detail="An internal server error occurred in generate text.",
286
+ ) from e
api/router/book.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import base64
3
+
4
+ from typing import Annotated, Optional
5
+ from api.function import data_ingestion, get_data, delete_data, update_data
6
+ from api.router.user import user_dependency
7
+ from fastapi import Form, APIRouter, File, UploadFile, Depends
8
+ from fastapi.responses import JSONResponse
9
+ from db.repository import get_db_conn
10
+ from db.get_data import GetDatabase
11
+ from db.models import Category, Metadata
12
+ from db.database import get_db
13
+ from langfuse.llama_index import LlamaIndexCallbackHandler
14
+ from config import MYSQL_CONFIG
15
+ from script.vector_db import IndexManager
16
+ from service.dto import MetadataRequest, MetadataResponse
17
+ from sqlalchemy.orm import Session
18
+ from sqlalchemy.future import select
19
+ from sqlalchemy.exc import SQLAlchemyError
20
+
21
+
22
+ router = APIRouter(tags=["Book"])
23
+
24
+ db_conn = get_db_conn(MYSQL_CONFIG)
25
+ get_database = GetDatabase(db_conn)
26
+ index_manager = IndexManager()
27
+ db_dependency = Annotated[Session, Depends(get_db)]
28
+
29
+
30
@router.post("/book")
async def upload_file(
    user: user_dependency,
    db: db_dependency,
    title: str = Form(...),
    author: str = Form(...),
    category_id: int = Form(...),
    year: int = Form(...),
    publisher: str = Form(...),
    file: UploadFile = File(...),
    thumbnail: Optional[UploadFile] = File(None),
):
    """Register a new book: validate its category, persist metadata, ingest the file.

    Fixes over the previous version:
    - the Metadata row was committed BEFORE the category check, so a bad
      ``category_id`` returned 404 but left an orphaned row behind — now
      the category is validated first;
    - the session is rolled back on database errors;
    - the ``thumbnail`` upload was accepted but silently discarded — it is
      now stored base64-encoded, matching ``update_metadata``.
    """
    print(user.get("role_id"))
    # if user is None or user.get('role_id') != 1:
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Validate the category before touching the Metadata table.
        category = db.execute(
            select(Category.category).where(Category.id == category_id)
        ).scalar_one_or_none()
        if category is None:
            return JSONResponse(status_code=404, content="Category not found")

        new_book = Metadata(
            title=title,
            author=author,
            category_id=category_id,
            year=year,
            publisher=publisher,
        )
        if thumbnail is not None:
            # Store the thumbnail base64-encoded, same scheme as update_metadata.
            thumbnail_data = await thumbnail.read()
            new_book.thumbnail = base64.b64encode(thumbnail_data).decode("utf-8")

        db.add(new_book)
        db.commit()
        logging.info("Database Inserted")

    except SQLAlchemyError as db_exc:
        db.rollback()  # keep the session usable after a failed transaction
        print(f"Database error: {db_exc}")
        return JSONResponse(status_code=500, content="Database error occurred")

    except Exception as e:
        print(f"Error: {e}")
        return JSONResponse(
            status_code=500, content="An error occurred while processing your request"
        )

    try:
        # Trace the ingestion in Langfuse.
        langfuse_callback_handler = LlamaIndexCallbackHandler()
        langfuse_callback_handler.set_trace_params(
            user_id="admin_book_uploaded",
        )

        # Reference metadata attached to every indexed node.
        reference = {
            "title": title,
            "author": author,
            "category": category,
            "year": year,
            "publisher": publisher,
        }

        # Upload to S3, parse, and index the document.
        response = await data_ingestion(category_id, reference, file)

    except Exception as e:
        print(f"File processing error: {e}")
        return JSONResponse(status_code=500, content="File processing error")

    return {
        "filename": file.filename,
        "response": response,
        "info": "upload file successfully",
    }
+ }
112
+
113
+
114
@router.get("/book")
async def get_metadata(user: user_dependency, db: db_dependency):
    """List every book joined with its category name."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")
    try:
        # Join Metadata with Category so each row carries the category name.
        rows = (
            db.query(
                Metadata.id,
                Metadata.title,
                Metadata.author,
                Category.category,
                Category.id,
                Metadata.year,
                Metadata.publisher,
                Metadata.thumbnail,
            )
            .join(Category, Metadata.category_id == Category.id)
            .all()
        )

        books = []
        for book_id, title, author, category, category_id, year, publisher, thumbnail in rows:
            books.append(
                MetadataResponse(
                    id=book_id,
                    title=title,
                    author=author,
                    category=category,
                    category_id=category_id,
                    year=year,
                    publisher=publisher,
                    # Normalise empty thumbnails to None.
                    thumbnail=thumbnail if thumbnail else None,
                )
            )
        return books
    except SQLAlchemyError as db_exc:
        print(f"Database error: {db_exc}")
        return JSONResponse(status_code=500, content="Database error occurred")
154
+
155
+
156
@router.put("/book/{metadata_id}")
async def update_metadata(
    user: user_dependency,
    db: db_dependency,
    metadata_id: int,
    title: str = Form(...),
    author: str = Form(...),
    category_id: int = Form(...),
    year: int = Form(...),
    publisher: str = Form(...),
    thumbnail: Optional[UploadFile] = File(None),
):
    """Update a book's metadata and keep the vector index in sync (admin only).

    Fixes over the previous version: the same Metadata row was fetched a
    second time mid-function and the category name re-queried after commit
    (both redundant — the values are already in hand), and the session is
    now rolled back when an error aborts the update.
    """
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        metadata = db.execute(
            select(Metadata).where(Metadata.id == metadata_id)
        ).scalar_one_or_none()

        if metadata is None:
            return JSONResponse(status_code=404, content="Metadata not found.")

        # Fetch old and new category names for the vector-index update.
        old_category = db.execute(
            select(Category.category).join(Metadata).where(Metadata.id == metadata_id)
        ).scalar_one_or_none()

        new_category = db.execute(
            select(Category.category).where(Category.id == category_id)
        ).scalar_one_or_none()

        if old_category is None or new_category is None:
            return JSONResponse(status_code=404, content="Category not found.")

        # References identify the book's nodes in the vector store.
        old_reference = {
            "title": metadata.title,
            "author": metadata.author,
            "category": old_category,
            "year": metadata.year,
            "publisher": metadata.publisher,
        }

        new_reference = {
            "title": title,
            "author": author,
            "category": new_category,
            "year": year,
            "publisher": publisher,
        }

        index_manager.update_vector_database(old_reference, new_reference)

        if thumbnail is not None:
            thumbnail_data = await thumbnail.read()
            metadata.thumbnail = base64.b64encode(thumbnail_data).decode("utf-8")

        # Apply the new values to the row already loaded above.
        metadata.title = title
        metadata.author = author
        metadata.category_id = category_id
        metadata.year = year
        metadata.publisher = publisher

        db.commit()
        db.refresh(metadata)  # pick up server-side defaults/triggers

        return MetadataResponse(
            id=metadata_id,
            title=metadata.title,
            author=metadata.author,
            category=new_category,  # already fetched; no need to re-query
            category_id=metadata.category_id,
            year=metadata.year,
            publisher=metadata.publisher,
            thumbnail=metadata.thumbnail if metadata.thumbnail is not None else None,
        )

    except Exception as e:
        db.rollback()  # leave the session clean after a failed update
        return JSONResponse(
            status_code=500, content=f"An error occurred while updating metadata as {e}"
        )
+
255
+
256
@router.delete("/book/{metadata_id}")
async def delete_metadata(user: user_dependency, db: db_dependency, metadata_id: int):
    """Delete a book's metadata row and its vectors from the index (admin only).

    Fix: the previous version also fetched the Metadata row via the ORM
    session into ``old_metadata`` and never used it — that dead query is
    removed. (``db`` stays in the signature for dependency compatibility.)
    """
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        old_reference = await get_database.get_data_by_id(metadata_id)

        # Remove the book's vectors first, then its database row.
        index_manager.delete_vector_database(old_reference)
        await delete_data(metadata_id, db_conn)

        return {"Status": "delete successfully"}

    except Exception as e:
        print(e)
        return JSONResponse(
            status_code=500, content="An error occurred while delete metadata"
        )
api/router/book_collection.py ADDED
@@ -0,0 +1,188 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, List, Optional
2
+ from api.router.user import user_dependency
3
+ from fastapi import APIRouter, Depends
4
+ from fastapi.responses import JSONResponse
5
+ from db.models import User_Meta, Metadata, Category
6
+ from db.database import get_db
7
+ from sqlalchemy.orm import Session
8
+ from sqlalchemy.exc import SQLAlchemyError
9
+
10
+ router = APIRouter(tags=["Book_Collection"])
11
+
12
+ db_dependency = Annotated[Session, Depends(get_db)]
13
+
14
+
15
@router.get("/book_collection")
async def get_book_collection(
    user: user_dependency,
    db: db_dependency,
):
    """This function will return a BookCollection"""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Every User_Meta row for this user, joined to its Metadata and Category.
        rows = (
            db.query(User_Meta, Metadata, Category)
            .join(Metadata, Metadata.id == User_Meta.metadata_id)
            .join(Category, Category.id == Metadata.category_id)
            .filter(User_Meta.user_id == user.get("id"))
            .all()
        )

        if not rows:
            return {"info": "No book collection found"}

        collection = []
        for user_meta, metadata, category in rows:
            collection.append(
                {
                    "user_id": user_meta.user_id,
                    "metadata_id": metadata.id,
                    "title": metadata.title,
                    "author": metadata.author,
                    "category_name": category.category,
                    "year": metadata.year,
                    "publisher": metadata.publisher,
                }
            )

        return {
            "status": "success",
            "book_collection": collection,
        }

    except SQLAlchemyError as e:
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )
63
+
64
+
65
@router.post("/book_collection")
async def request_book_collection(
    user: user_dependency,
    db: db_dependency,
    metadata_id: List[Optional[int]],
):
    """Attach the given metadata ids to the authenticated user's collection.

    Fix: the previous version did not roll back the session after a failed
    insert, leaving it in an aborted state for later requests (the PUT
    handler already rolled back; this makes POST consistent with it).
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # One User_Meta row per requested metadata id.
        user_meta_entries = [
            User_Meta(user_id=user.get("id"), metadata_id=mid) for mid in metadata_id
        ]

        db.add_all(user_meta_entries)
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # keep the session usable after the failed insert
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        db.rollback()
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {"status": "success", "user_meta": [entry.id for entry in user_meta_entries]}
92
+
93
+
94
@router.put("/book_collection")
async def update_book_collection(
    user: user_dependency,
    db: db_dependency,
    metadata_id: List[Optional[int]],
):
    """Replace the user's entire book collection with the given metadata ids.

    Fixes over the previous version: the delete was committed in its own
    transaction before the insert, so a failed insert left the user with an
    emptied collection — delete and insert now share ONE transaction; the
    generic exception path also rolls back instead of leaving the session dirty.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Delete old entries and insert new ones atomically.
        db.query(User_Meta).filter(User_Meta.user_id == user.get("id")).delete(
            synchronize_session=False
        )

        user_meta_entries = [
            User_Meta(user_id=user.get("id"), metadata_id=mid) for mid in metadata_id
        ]
        db.add_all(user_meta_entries)
        db.commit()
    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        db.rollback()  # previously missing: the session stayed aborted
        return JSONResponse(status_code=500, content=f"Unexpected error: {str(e)}")

    return {"status": "success", "user_meta": [entry.id for entry in user_meta_entries]}
123
+
124
+
125
@router.delete("/book_collection/{metadata_id}")
async def delete_book_collection(
    user: user_dependency,
    db: db_dependency,
    metadata_id: int
):
    """Remove one book from the authenticated user's collection.

    Fix: the previous version read ``entry_to_delete.id`` AFTER
    ``db.delete`` + ``db.commit``; the commit expires the instance and the
    subsequent attribute access triggers a refresh of a deleted row, which
    raises. The id is now captured before deleting.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Find the entry belonging to this user for the given metadata_id.
        entry_to_delete = db.query(User_Meta).filter(
            User_Meta.user_id == user.get("id"),
            User_Meta.metadata_id == metadata_id
        ).first()

        if not entry_to_delete:
            return JSONResponse(status_code=404, content="Entry not found to delete.")

        deleted_id = entry_to_delete.id  # capture before the row is gone

        db.delete(entry_to_delete)
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # Rollback in case of any database error
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {
        "status": "success",
        "deleted_entry": deleted_id,
    }
160
+
161
+
162
@router.delete("/all_collections")
async def delete_all_book(
    user: user_dependency,
    db: db_dependency,
):
    """Clear the authenticated user's entire book collection."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Bulk-delete every User_Meta row owned by this user.
        db.query(User_Meta).filter(User_Meta.user_id == user.get("id")).delete(
            synchronize_session="fetch"
        )
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # Rollback in case of any database error
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {
        "status": "success",
        "delete book collection from": user.get("id"),
    }
api/router/bot_general.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Depends
2
+ from fastapi.responses import JSONResponse
3
+ from service.dto import UserPromptRequest, BotResponse
4
+ from core.chat.chatstore import ChatStore
5
+ from db.database import get_db
6
+ from db.models import Session_Publisher
7
+ from sqlalchemy.orm import Session
8
+ from api.function import (
9
+ generate_streaming_completion,
10
+ generate_completion_non_streaming,
11
+ )
12
+ from api.router.user import user_dependency
13
+ from sse_starlette.sse import EventSourceResponse
14
+ from utils.utils import generate_uuid
15
+ from typing import Annotated
16
+ from langfuse.llama_index import LlamaIndexCallbackHandler
17
+
18
+
19
+ router = APIRouter(tags=["Bot_General"])
20
+
21
+ db_dependency = Annotated[Session, Depends(get_db)]
22
+
23
+
24
def get_chat_store():
    """FastAPI dependency factory: return a fresh ChatStore instance."""
    return ChatStore()
26
+
27
+
28
@router.post("/bot_general/new")
async def create_session_general():
    """Create a new anonymous general-bot session and return its UUID."""
    session_id = generate_uuid()
    return {"session_id": session_id}
32
+
33
+
34
@router.get("/bot/{session_id}")
async def get_session_id(
    user: user_dependency,
    session_id: str,
    chat_store: ChatStore = Depends(get_chat_store),
):
    """Return the chat history stored in MongoDB for one session."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    return chat_store.get_all_messages_mongodb(session_id)
48
+
49
+
50
@router.post("/bot/{session_id}")
async def bot_generator_general(
    session_id: str, user_prompt_request: UserPromptRequest
):
    """Answer a prompt in the general bot, streaming via SSE when requested."""
    # Trace this exchange in Langfuse under the anonymous "guest" user.
    handler = LlamaIndexCallbackHandler()
    handler.set_trace_params(user_id="guest", session_id=session_id)

    if not user_prompt_request.streaming:
        response, metadata, scores = generate_completion_non_streaming(
            session_id,
            user_prompt_request.prompt,
        )
        return BotResponse(
            content=response,
            metadata=metadata,
            scores=scores,
        )

    return EventSourceResponse(
        generate_streaming_completion(user_prompt_request.prompt, session_id)
    )
76
+
77
+
78
@router.delete("/bot/{session_id}")
async def delete_bot(
    db: db_dependency, session_id: str, chat_store: ChatStore = Depends(get_chat_store)
):
    """Delete a chat session: its stored messages and its database row.

    Returns 404 when no Session_Publisher row exists, 400 on any other
    failure.
    """
    try:
        # Remove the messages from the external chat store first.
        chat_store.delete_messages(session_id)

        # Then drop the session record itself.
        session = (
            db.query(Session_Publisher)
            .filter(Session_Publisher.id == session_id)
            .first()
        )
        if session is None:
            # NOTE(review): the chat messages were already deleted above even
            # though the DB row is missing — confirm this ordering is intended.
            return JSONResponse(status_code=404, content="Session not found")

        db.delete(session)
        db.commit()
        return {"info": f"Delete {session_id} successful"}
    except Exception as e:
        db.rollback()  # Fix: was missing — keep the session usable after failure
        print(f"An error occurred in update data.: {e}")
        return JSONResponse(status_code=400, content="the error when deleting message")
api/router/bot_one.py ADDED
@@ -0,0 +1,151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated
2
+ from fastapi import APIRouter, Depends
3
+ from fastapi.responses import JSONResponse
4
+ from service.dto import UserPromptRequest, BotResponse
5
+ from core.chat.chatstore import ChatStore
6
+ from db.database import get_db
7
+ from db.models import Metadata, Session_Publisher
8
+ from db.models import Session as SessionModel
9
+ from sqlalchemy.orm import Session
10
+ from sqlalchemy.exc import SQLAlchemyError
11
+ from sqlalchemy import select
12
+ from api.function import (
13
+ generate_streaming_completion,
14
+ generate_completion_non_streaming,
15
+ )
16
+ from datetime import datetime
17
+ from api.router.user import user_dependency
18
+ from sse_starlette.sse import EventSourceResponse
19
+ from utils.utils import generate_uuid
20
+ from langfuse.llama_index import LlamaIndexCallbackHandler
21
+
22
+
23
+ router = APIRouter(tags=["Bot_One"])
24
+
25
+ db_dependency = Annotated[Session, Depends(get_db)]
26
+
27
+
28
def get_chat_store():
    """FastAPI dependency factory: return a fresh ChatStore instance."""
    return ChatStore()
30
+
31
+
32
@router.post("/bot_one/{metadata_id}")
async def create_bot_one(user: user_dependency, db: db_dependency, metadata_id: int):
    """Create a publisher-bot session bound to a single metadata (book) id."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Generate a new session ID (UUID)
        session_id = generate_uuid()

        new_session = Session_Publisher(
            id=session_id,
            user_id=user.get("id"),
            metadata_id=metadata_id,
        )
        db.add(new_session)
        db.commit()

        return {
            # Fix: key was misspelled "statur".
            "status": "session id created successfully",
            "session_id": session_id,
        }
    except Exception as e:
        db.rollback()  # Fix: roll back the failed INSERT (was missing)
        return JSONResponse(
            status_code=500, content=f"An unexpected in retrieving session id {str(e)}"
        )
59
+
60
+
61
@router.post("/bot_one/{metadata_id}/{session_id}")
async def generator_bot_one(
    user: user_dependency,
    db: db_dependency,
    metadata_id: int,
    session_id: str,
    user_prompt_request: UserPromptRequest,
):
    """Generate a bot answer scoped to one book in an existing publisher
    session.

    Streams over SSE when requested; otherwise returns a BotResponse and
    bumps the session's updated_at timestamp.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    langfuse_callback_handler = LlamaIndexCallbackHandler()
    langfuse_callback_handler.set_trace_params(
        user_id=user.get("username"), session_id=session_id
    )

    # Query to retrieve the title(s) this bot is restricted to.
    try:
        # NOTE(review): the join compares Metadata.id to the raw metadata_id
        # value rather than a Session_Publisher column — confirm this is the
        # intended relation.
        query = (
            select(Metadata.title)
            .join(Session_Publisher, Metadata.id == metadata_id)
            .where(
                Session_Publisher.user_id == user.get("id"),
                Session_Publisher.id == session_id,
            )
        )
        result = db.execute(query)
        titles = result.scalars().all()
        print(titles)

    except SQLAlchemyError as e:
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    if user_prompt_request.streaming:
        return EventSourceResponse(
            generate_streaming_completion(
                user_prompt_request.prompt,
                session_id,
            )
        )
    else:
        response, metadata, scores = generate_completion_non_streaming(
            session_id, user_prompt_request.prompt, titles, type_bot="specific"
        )

        # Fix: guard against a missing session row — the old code dereferenced
        # None and raised AttributeError for unknown session ids.
        existing_session = (
            db.query(Session_Publisher)
            .filter(Session_Publisher.id == session_id)
            .first()
        )
        if existing_session is not None:
            existing_session.updated_at = datetime.now()
            db.commit()

        return BotResponse(
            content=response,
            metadata=metadata,
            scores=scores,
        )
122
+
123
+
124
# NOTE(review): the route path is missing a "/" before "{metadata_id}" — it
# matches URLs like /bot_one123. Kept unchanged to avoid breaking callers;
# confirm the intended URL shape.
@router.get("/bot_one{metadata_id}")
async def get_all_session_bot_one(
    user: user_dependency, db: db_dependency, metadata_id: int
):
    """List the caller's sessions for one metadata id, newest first.

    Returns [{"id": ..., "updated_at": "<str>"}] sorted by updated_at
    descending.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Query the session rows owned by this user for this metadata id.
        query = select(Session_Publisher.id, Session_Publisher.updated_at).where(
            Session_Publisher.user_id == user.get("id"),
            Session_Publisher.metadata_id == metadata_id,
        )
        sessions = db.execute(query).all()

        # Fix: sort on the datetime values directly instead of round-tripping
        # through str()/fromisoformat(), which crashed with ValueError when
        # updated_at was NULL ("None" is not an ISO date). NULLs sort last.
        ordered = sorted(
            sessions,
            key=lambda row: row.updated_at or datetime.min,
            reverse=True,
        )

        return [
            {"id": row.id, "updated_at": str(row.updated_at)} for row in ordered
        ]

    except Exception as e:
        # Log the error and return JSONResponse for FastAPI
        print(f"An error occurred while fetching session IDs: {e}")
        return JSONResponse(status_code=400, content="Error retrieving session IDs")
api/router/bot_specific.py ADDED
@@ -0,0 +1,342 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, List, Optional
2
+ from fastapi import APIRouter, Depends
3
+ from fastapi.responses import JSONResponse
4
+ from service.dto import UserPromptRequest, BotResponse, BotCreateRequest
5
+ from core.chat.chatstore import ChatStore
6
+ from db.database import get_db
7
+ from db.models import Bot_Meta, Bot, Metadata
8
+ from db.models import Session as SessionModel
9
+ from sqlalchemy.orm import Session
10
+ from sqlalchemy.exc import SQLAlchemyError
11
+ from sqlalchemy.exc import NoResultFound
12
+ from sqlalchemy import select
13
+ from api.function import (
14
+ generate_streaming_completion,
15
+ generate_completion_non_streaming,
16
+ )
17
+ from api.router.user import user_dependency
18
+ from sse_starlette.sse import EventSourceResponse
19
+ from utils.utils import generate_uuid
20
+ from langfuse.llama_index import LlamaIndexCallbackHandler
21
+ from datetime import datetime
22
+
23
+
24
+ router = APIRouter(tags=["Bot_Specific"])
25
+
26
+ db_dependency = Annotated[Session, Depends(get_db)]
27
+
28
+
29
def get_chat_store():
    """FastAPI dependency factory: return a fresh ChatStore instance."""
    return ChatStore()
31
+
32
+
33
@router.post("/bot")
async def create_bot_id(
    user: user_dependency,
    db: db_dependency,
    bot_request: BotCreateRequest,
):
    """Register a new bot for the authenticated user and return its id."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        bot = Bot(user_id=user.get("id"), bot_name=bot_request.bot_name)
        db.add(bot)
        db.commit()
        db.refresh(bot)  # pick up the generated primary key

        return {"status": "success", "bot_id": bot.id}
    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )
63
+
64
+
65
@router.post("/meta/{bot_id}")
async def create_bot_specific(
    user: user_dependency,
    db: db_dependency,
    bot_id: int,
    metadata_id: List[Optional[int]],
):
    """Attach a list of metadata (book) ids to a bot.

    Returns the ids of the newly created Bot_Meta rows.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # One Bot_Meta link per requested metadata id.
        bot_meta_entries = [
            Bot_Meta(bot_id=bot_id, metadata_id=mid) for mid in metadata_id
        ]
        db.add_all(bot_meta_entries)
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # Fix: was missing here (sibling update endpoint has it)
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {"status": "success", "bot_meta": [entry.id for entry in bot_meta_entries]}
93
+
94
+
95
@router.put("/meta/{bot_id}")
async def update_bot_specific(
    user: user_dependency,
    db: db_dependency,
    bot_id: int,
    metadata_id: List[Optional[int]],
):
    """Replace all metadata links of a bot with the given list.

    Fix: the old implementation committed the deletion of the existing links
    before inserting the new ones, so a failed insert permanently destroyed
    the previous state. Delete + insert now happen in a single transaction.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Remove the old links (not committed yet).
        existing_entries = db.query(Bot_Meta).filter(Bot_Meta.bot_id == bot_id).all()
        for entry in existing_entries:
            db.delete(entry)

        # Insert the replacement links.
        bot_meta_entries = [
            Bot_Meta(bot_id=bot_id, metadata_id=mid) for mid in metadata_id
        ]
        db.add_all(bot_meta_entries)

        # Single commit: either the full replacement succeeds or nothing changes.
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # Rollback restores the previous links as well
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {"status": "success", "bot_meta": [entry.id for entry in bot_meta_entries]}
132
+
133
+
134
@router.delete("/meta/{bot_id}/{metadata_id}")
async def delete_bot_specific(
    user: user_dependency,
    db: db_dependency,
    bot_id: int,
    metadata_id: int,
):
    """Detach one metadata (book) id from a bot.

    Returns 404 when the link does not exist; on success returns the id of
    the deleted Bot_Meta row.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        bot_meta_entry = (
            db.query(Bot_Meta)
            .filter(Bot_Meta.bot_id == bot_id, Bot_Meta.metadata_id == metadata_id)
            .first()
        )

        if not bot_meta_entry:
            return JSONResponse(status_code=404, content="No entry found to delete.")

        # Fix: capture the id BEFORE commit — after delete()+commit the
        # instance is expired and attribute access raises ObjectDeletedError.
        deleted_entry_id = bot_meta_entry.id

        db.delete(bot_meta_entry)
        db.commit()

    except SQLAlchemyError as e:
        db.rollback()  # Rollback in case of any database error
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    return {
        "status": "success",
        "deleted_entry_id": deleted_entry_id,
    }
171
+
172
+
173
@router.delete("/bot_all/{bot_id}")
async def delete_bot_id(
    user: user_dependency,
    db: db_dependency,
    bot_id: int,
):
    """Delete a bot together with its sessions and metadata links."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        target = db.query(Bot).filter(Bot.id == bot_id).first()
        print("bot entry", target)

        if target is None:
            return JSONResponse(
                status_code=404, content=f"Bot with id {bot_id} not found."
            )

        # Remove dependents first, then the bot row itself.
        db.query(SessionModel).filter(SessionModel.bot_id == bot_id).delete(
            synchronize_session="fetch"
        )
        db.query(Bot_Meta).filter(Bot_Meta.bot_id == bot_id).delete(
            synchronize_session="fetch"
        )
        db.delete(target)
        db.commit()

        return {
            "status": "success",
            "deleted_bot_id": bot_id,
        }
    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )
213
+
214
+
215
@router.post("/session/{bot_id}/new")
async def create_new_session(user: user_dependency, db: db_dependency, bot_id: int):
    """Open a fresh chat session for one of the caller's bots."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    print(user.get("id"))
    user_id = user.get("id")

    # The bot must both exist and belong to the caller.
    try:
        bot = db.execute(
            select(Bot).where(Bot.id == bot_id, Bot.user_id == user_id)
        ).scalar_one()
        print(bot)
    except NoResultFound:
        return JSONResponse(
            status_code=404, content="Bot not found or unauthorized access."
        )

    try:
        # New session keyed by a freshly generated UUID.
        session_id = generate_uuid()
        db.add(
            SessionModel(
                id=session_id,
                user_id=user_id,
                bot_id=bot_id,
            )
        )
        db.commit()

        return {
            "session_id": session_id,
        }
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected in retrieving session id {str(e)}"
        )
257
+
258
+
259
@router.get("/bot/all/{bot_id}")
async def get_all_session_ids(user: user_dependency, db: db_dependency, bot_id: int):
    """List session ids (with updated_at) for one bot of the current user."""
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        stmt = select(SessionModel.id, SessionModel.updated_at).where(
            SessionModel.user_id == user.get("id"),
            SessionModel.bot_id == bot_id,
        )
        rows = db.execute(stmt).all()

        return [{"id": row.id, "updated_at": row.updated_at} for row in rows]

    except Exception as e:
        # Log the error and return JSONResponse for FastAPI
        print(f"An error occurred while fetching session IDs: {e}")
        return JSONResponse(status_code=400, content="Error retrieving session IDs")
280
+
281
+
282
@router.post("/bot/{bot_id}/{session_id}")
async def bot_generator_spesific(
    user: user_dependency,
    db: db_dependency,
    bot_id: int,
    session_id: str,
    user_prompt_request: UserPromptRequest,
):
    """Generate an answer from a user-configured bot, restricted to the book
    titles linked to that bot.

    Streams over SSE when requested; otherwise returns a BotResponse and
    bumps the session's updated_at timestamp.
    """
    if user is None:
        return JSONResponse(status_code=401, content="Authentication Failed")

    langfuse_callback_handler = LlamaIndexCallbackHandler()
    langfuse_callback_handler.set_trace_params(
        user_id=user.get("username"), session_id=session_id
    )

    # Query to retrieve the titles this bot may draw from.
    try:
        # NOTE(review): the SessionModel join compares Bot_Meta.bot_id to the
        # raw bot_id value instead of a SessionModel column — confirm this is
        # the intended relation.
        query = (
            select(Metadata.title)
            .join(Bot_Meta, Metadata.id == Bot_Meta.metadata_id)
            .join(SessionModel, Bot_Meta.bot_id == bot_id)
            .where(
                SessionModel.user_id == user.get("id"), SessionModel.id == session_id
            )
        )
        result = db.execute(query)
        titles = result.scalars().all()
        print(titles)

    except SQLAlchemyError as e:
        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
    except Exception as e:
        return JSONResponse(
            status_code=500, content=f"An unexpected error occurred: {str(e)}"
        )

    if user_prompt_request.streaming:
        return EventSourceResponse(
            generate_streaming_completion(
                user_prompt_request.prompt,
                session_id,
            )
        )
    else:
        response, metadata, scores = generate_completion_non_streaming(
            session_id, user_prompt_request.prompt, titles, type_bot="specific"
        )

        # Fix: guard against a missing session row — the old code dereferenced
        # None and raised AttributeError for unknown session ids.
        existing_session = (
            db.query(SessionModel).filter(SessionModel.id == session_id).first()
        )
        if existing_session is not None:
            existing_session.updated_at = datetime.now()
            db.commit()

        return BotResponse(
            content=response,
            metadata=metadata,
            scores=scores,
        )
api/router/category.py ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from api.router.user import user_dependency
2
+ from fastapi.responses import JSONResponse
3
+ from fastapi import APIRouter, HTTPException, Depends, Query
4
+
5
+ from db.models import Category
6
+ from db.database import get_db
7
+ from service.dto import CategoryCreate
8
+
9
+ from script.vector_db import IndexManager
10
+ from sqlalchemy.orm import Session
11
+ from sqlalchemy.exc import SQLAlchemyError, IntegrityError
12
+
13
+ from typing import Annotated
14
+
15
+ router = APIRouter(tags=["Category"])
16
+
17
+ index_manager = IndexManager()
18
+ db_dependency = Annotated[Session, Depends(get_db)]
19
+
20
+
21
@router.get("/category")
async def get_all_categories(user: user_dependency, db: db_dependency):
    """Return every category; restricted to admin users (role_id == 1)."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        rows = db.query(Category).all()
        if not rows:
            return JSONResponse(status_code=404, content="No categories found")

        payload = [{"id": row.id, "category": row.category} for row in rows]
        return {
            "message": "Categories retrieved successfully",
            "categories": payload,
        }

    except SQLAlchemyError as e:
        return JSONResponse(
            status_code=500, content="Database error occurred: " + str(e)
        )
+ )
43
+
44
+
45
@router.get("/category/{category_id}")
async def get_categories_by_ids(
    user: user_dependency,
    db: db_dependency,
    category_id: int,
):
    """Return a single category by id; restricted to admin users."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        match = db.query(Category).filter(Category.id == category_id).first()
        if match is None:
            return JSONResponse(
                status_code=404, content="No categories found for the given IDs"
            )

        return {
            "message": "Categories retrieved successfully",
            "category": {"id": match.id, "category": match.category},
        }

    except SQLAlchemyError as e:
        return JSONResponse(
            status_code=500, content="Database error occurred: " + str(e)
        )
+ )
70
+
71
@router.post("/category")
async def create_category(
    user: user_dependency, db: db_dependency, category: CategoryCreate
):
    """Create a new category; restricted to admin users (role_id == 1).

    Returns 400 when the name already exists and 500 on database errors.
    """
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Reject duplicates by name.
        existing_category = (
            db.query(Category)
            .filter(Category.category == category.category_name)
            .first()
        )
        if existing_category:
            return JSONResponse(status_code=400, content="Category already exists")

        # Fix: store the category NAME; the old code passed the whole Pydantic
        # request object as the column value.
        new_category = Category(category=category.category_name)
        db.add(new_category)
        db.commit()
        db.refresh(new_category)

        return {
            "message": "Category created successfully",
            "category_id": new_category.id,
        }

    except IntegrityError:
        db.rollback()
        return JSONResponse(
            status_code=400,
            content="Database integrity error: possibly a duplicate entry.",
        )

    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(
            status_code=500, content="Database error occurred: " + str(e)
        )
+ )
107
+
108
+
109
@router.put("/category/{category_id}")
async def update_category(
    user: user_dependency, db: db_dependency, category_id: int, category: CategoryCreate
):
    """Rename an existing category; restricted to admin users."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        target = db.query(Category).filter(Category.id == category_id).first()
        if target is None:
            return JSONResponse(status_code=404, content="Category not found")

        target.category = category.category_name
        db.commit()

        return {"message": "Category updated successfully"}

    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(
            status_code=500, content="Database error occurred: " + str(e)
        )
+ )
134
+
135
+
136
@router.delete("/category/{category_id}")
async def delete_category(user: user_dependency, db: db_dependency, category_id: int):
    """Delete a category by id; restricted to admin users."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        target = db.query(Category).filter(Category.id == category_id).first()
        if target is None:
            return JSONResponse(status_code=404, content="Category not found")

        db.delete(target)
        db.commit()

        return {"message": "Category deleted successfully"}

    except SQLAlchemyError as e:
        db.rollback()
        return JSONResponse(
            status_code=500, content="Database error occurred: " + str(e)
        )
+ )
api/router/role.py CHANGED
@@ -1,20 +1,108 @@
1
- from fastapi import APIRouter
 
 
 
 
 
 
 
2
 
3
 
4
  router = APIRouter(tags=["Roles"])
 
 
5
 
6
  @router.get("/roles")
7
- async def get_data_roles():
8
- pass
 
 
 
 
 
 
 
 
 
 
 
 
9
 
 
 
 
 
10
 
 
11
  @router.post("/roles")
12
- async def add_data_roles():
13
- pass
14
- @router.put("/roles/{id}")
15
- async def update_data_roles():
16
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
 
 
18
  @router.delete("/roles/{id}")
19
- async def remove_data_roles():
20
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, Depends
2
+ from fastapi.responses import JSONResponse
3
+ from api.router.user import user_dependency
4
+ from typing import Annotated
5
+ from sqlalchemy.orm import Session
6
+ from db.database import get_db
7
+ from db.models import Role, User
8
+ from service.dto import RoleCreate, RoleUpdate
9
 
10
 
11
  router = APIRouter(tags=["Roles"])
12
+ db_dependency = Annotated[Session, Depends(get_db)]
13
+
14
 
15
@router.get("/roles")
async def get_data_roles(user: user_dependency, db: db_dependency):
    """Return all roles; restricted to admin users (role_id == 1)."""
    # Check that the user is valid and has role_id == 1 (admin).
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    try:
        # Query role data.
        roles = db.query(Role).all()

        # If no roles were found, return 404.
        if not roles:
            return JSONResponse(status_code=404, content="No roles found")

        return roles

    except Exception as e:
        # Capture the error for debugging.
        print(f"Error fetching roles: {str(e)}")
        return JSONResponse(status_code=500, content="Internal Server Error")
35
 
36
# POST: Add a new role
@router.post("/roles")
async def add_data_roles(
    role_data: RoleCreate, user: user_dependency, db: db_dependency
):
    """Create a new role; restricted to admin users (role_id == 1)."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    role = Role(name=role_data.name)
    db.add(role)
    db.commit()
    db.refresh(role)  # pick up the generated primary key

    return {"message": "Role added successfully", "role": role}
50
+
51
+
52
@router.put("/roles/{role_id}")
async def update_data_roles(
    role_id: int, role_data: RoleUpdate, user: user_dependency, db: db_dependency
):
    """Rename an existing role; restricted to admin users (role_id == 1)."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    # Fix: the lookup previously filtered on the builtin `id` instead of the
    # `role_id` path parameter, so the requested role was never found.
    role = db.query(Role).filter(Role.id == role_id).first()
    if role is None:
        return JSONResponse(status_code=404, content="Role not found")

    role.name = role_data.name
    db.commit()
    db.refresh(role)

    return {"message": "Role updated successfully", "role": role}
68
+
69
 
70
# DELETE: Remove a role
@router.delete("/roles/{id}")
async def remove_data_roles(id: int, user: user_dependency, db: db_dependency):
    """Delete a role by id; restricted to admin users (role_id == 1)."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    role = db.query(Role).filter(Role.id == id).first()
    if role is None:
        return JSONResponse(status_code=404, content="Role not found")

    db.delete(role)
    db.commit()

    return {"message": "Role removed successfully"}
84
+
85
+
86
# Fix: the route declared "{id}" while the handler takes `user_id`, which made
# FastAPI treat user_id as a required QUERY parameter and left the path
# segment unused. The path parameter now matches the function argument.
@router.put("/role_user/{user_id}")
async def update_user_role(
    user: user_dependency, db: db_dependency, user_id: int, role_data: RoleUpdate
):
    """Assign a new role to a user; restricted to admin users (role_id == 1)."""
    if user is None or user.get("role_id") != 1:
        return JSONResponse(status_code=401, content="Authentication Failed")

    # Fetch the user to be updated.
    user_to_update = db.query(User).filter(User.id == user_id).first()
    if user_to_update is None:
        return JSONResponse(status_code=404, content="User not found")

    # Update the user's role from the request payload.
    user_to_update.role_id = role_data.role_id

    db.commit()
    db.refresh(user_to_update)

    return {"message": "User role updated successfully", "user": user_to_update}
api/router/trial.py CHANGED
@@ -3,18 +3,17 @@ from fastapi import APIRouter
3
 
4
  router = APIRouter(tags=["Trial"])
5
 
6
- @router.get("/roles")
7
  async def get_trial_data():
8
  pass
9
 
10
-
11
- @router.post("/roles")
12
  async def add_trial_data():
13
  pass
14
- @router.put("/roles/{id}")
15
  async def update_trial_data():
16
  pass
17
 
18
- @router.delete("/roles/{id}")
19
  async def remove_trial_data():
20
  pass
 
3
 
4
  router = APIRouter(tags=["Trial"])
5
 
6
@router.get("/trials")
async def get_trial_data():
    """Placeholder endpoint — trial retrieval not yet implemented."""
    pass

@router.post("/trials")
async def add_trial_data():
    """Placeholder endpoint — trial creation not yet implemented."""
    pass

@router.put("/trials/{id}")
async def update_trial_data():
    """Placeholder endpoint — trial update not yet implemented."""
    pass

@router.delete("/trials/{id}")
async def remove_trial_data():
    """Placeholder endpoint — trial removal not yet implemented."""
    pass
api/router/user.py CHANGED
@@ -1,20 +1,153 @@
1
- from fastapi import APIRouter
 
 
 
 
 
 
 
 
 
 
2
 
3
 
4
  router = APIRouter(tags=["User"])
5
 
6
- @router.post("/login")
7
- async def get_data_roles():
8
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
 
11
  @router.post("/register")
12
- async def register_user():
13
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  @router.post("/forgot_password")
15
  async def forget_password():
16
  pass
17
 
 
18
  @router.post("/change_password")
19
- async def change_password():
20
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Depends, status
2
+ from fastapi.security import OAuth2PasswordRequestForm
3
+ from fastapi.responses import JSONResponse
4
+ from db.models import User
5
+ from db.database import get_db
6
+ from api.auth import get_current_user, create_access_token
7
+ from service.dto import CreateUserRequest, UserVerification, Token
8
+ from typing import Annotated
9
+ from passlib.context import CryptContext
10
+ from sqlalchemy.orm import Session
11
+ from datetime import timedelta
12
 
13
 
14
  router = APIRouter(tags=["User"])
15
 
16
+ bcrypt_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
17
+
18
+ db_dependency = Annotated[Session, Depends(get_db)]
19
+ user_dependency = Annotated[dict, Depends(get_current_user)]
20
+
21
+ ACCESS_TOKEN_EXPIRE_MINUTES = 43200
22
+
23
+
24
+ @router.post("/login", response_model=Token)
25
+ async def login_for_access_token(
26
+ login_data: Annotated[OAuth2PasswordRequestForm, Depends()],
27
+ db: Session = Depends(get_db),
28
+ ):
29
+ user = db.query(User).filter(User.username == login_data.username).first()
30
+
31
+ if not user or not bcrypt_context.verify(login_data.password, user.password_hash):
32
+ return JSONResponse(
33
+ status_code=status.HTTP_401_UNAUTHORIZED,
34
+ content="Incorrect username or password",
35
+ headers={"WWW-Authenticate": "Bearer"},
36
+ )
37
+
38
+ try:
39
+ access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
40
+ access_token = create_access_token(
41
+ user.username, user.name, user.id, user.role_id, access_token_expires, user.email
42
+ )
43
+
44
+ return {"access_token": access_token, "token_type": "bearer"}
45
+
46
+ except Exception as e:
47
+ print(e)
48
+ return JSONResponse(
49
+ status_code=500, content="An error occuring when login"
50
+ )
51
+
52
+
53
+ @router.get("/login", response_model=dict)
54
+ async def get_user(user: user_dependency):
55
+ if user is None:
56
+ return JSONResponse(
57
+ status_code=401, content="Authentication Failed"
58
+ )
59
+ return {
60
+ "username": user.get("username"),
61
+ "name" : user.get("name"),
62
+ "id": user.get("id"),
63
+ "email": user.get("email"),
64
+ "role": user.get("role_id"),
65
+ }
66
+
67
+
68
+ @router.get("/users", response_model=list[dict])
69
+ async def get_all_users(user: user_dependency, db: Session = Depends(get_db)):
70
+ # Check if the current user has an admin role
71
+ if user.get("role_id") != 1: # Adjust this check based on how roles are represented
72
+ return JSONResponse(
73
+ status_code=401, content="Authentication Failed"
74
+ )
75
+
76
+ # Query the database to retrieve all users
77
+ users = db.query(
78
+ User
79
+ ).all() # Assuming you have a User model with an SQLAlchemy session
80
+ return [
81
+ {
82
+ "id": user.id,
83
+ "username": user.username,
84
+ "name": user.name,
85
+ "email": user.email,
86
+ "role": user.role_id,
87
+ }
88
+ for user in users
89
+ ]
90
 
91
 
92
  @router.post("/register")
93
+ async def register_user(db: db_dependency, create_user_request: CreateUserRequest):
94
+ existing_user = (
95
+ db.query(User).filter(User.email == create_user_request.email).first()
96
+ )
97
+
98
+ if existing_user:
99
+ return JSONResponse(
100
+ status_code=400, content="Email is already registered"
101
+ )
102
+
103
+ try:
104
+ password_hash = bcrypt_context.hash(create_user_request.password)
105
+
106
+ create_user_model = User(
107
+ name=create_user_request.name,
108
+ username=create_user_request.username,
109
+ email=create_user_request.email,
110
+ role_id=create_user_request.role_id,
111
+ password_hash=password_hash,
112
+ )
113
+
114
+ db.add(create_user_model)
115
+ db.commit()
116
+ db.refresh(create_user_model)
117
+
118
+ return {"message": "User created successfully", "user_id": create_user_model.id}
119
+ except Exception as e:
120
+ print(e)
121
+ return JSONResponse(
122
+ status_code=500, content="An error occuring when register user"
123
+ )
124
+
125
+
126
  @router.post("/forgot_password")
127
  async def forget_password():
128
  pass
129
 
130
+
131
  @router.post("/change_password")
132
+ async def change_password(
133
+ user: user_dependency, db: db_dependency, user_verification: UserVerification
134
+ ):
135
+ if user is None:
136
+ return JSONResponse(
137
+ status_code=401, content="Authentication Failed"
138
+ )
139
+ user_model = db.query(User).filter(User.id == user.get("id")).first()
140
+
141
+ if not bcrypt_context.verify(
142
+ user_verification.password, user_model.hashed_password
143
+ ):
144
+ return JSONResponse(
145
+ status_code=401, content="Error on password change"
146
+ )
147
+
148
+ user_model.hashed_password = bcrypt_context.hash(user_verification.new_password)
149
+ db.add(user_model)
150
+ db.commit()
151
+ db.refresh(user_model)
152
+
153
+ return {"message": "User's password successfully changed", "user_id": user_model.id}
app.py CHANGED
@@ -1,13 +1,25 @@
1
  from fastapi.applications import FastAPI
2
- from api.router import health, topic, user, bot, trial, role
 
 
 
 
 
 
 
 
 
 
 
3
  from fastapi.middleware.cors import CORSMiddleware
4
  from api.events import register_events
5
  from utils.utils import pipe
6
- import uvicorn
7
 
8
  def create_instance() -> FastAPI:
9
  return FastAPI()
10
 
 
11
  def add_middleware(app: FastAPI) -> FastAPI:
12
  app.add_middleware(
13
  CORSMiddleware,
@@ -18,15 +30,20 @@ def add_middleware(app: FastAPI) -> FastAPI:
18
  )
19
  return app
20
 
 
21
  def init_database(app: FastAPI) -> FastAPI:
22
  return app
23
 
24
 
25
  def register_routers(app: FastAPI) -> FastAPI:
26
  app.include_router(user.router)
27
- app.include_router(topic.router)
28
- app.include_router(bot.router)
29
- app.include_router(trial.router)
 
 
 
 
30
  app.include_router(role.router)
31
  app.include_router(health.router)
32
 
@@ -35,8 +52,13 @@ def register_routers(app: FastAPI) -> FastAPI:
35
 
36
  def init_app() -> FastAPI:
37
  app: FastAPI = pipe(
38
- create_instance(), add_middleware, init_database, register_events, register_routers
 
 
 
 
39
  )
40
  return app
41
 
 
42
  app = init_app()
 
1
  from fastapi.applications import FastAPI
2
+ from api.router import (
3
+ health,
4
+ user,
5
+ trial,
6
+ role,
7
+ book,
8
+ book_collection,
9
+ category,
10
+ bot_general,
11
+ bot_specific,
12
+ bot_one
13
+ )
14
  from fastapi.middleware.cors import CORSMiddleware
15
  from api.events import register_events
16
  from utils.utils import pipe
17
+
18
 
19
  def create_instance() -> FastAPI:
20
  return FastAPI()
21
 
22
+
23
  def add_middleware(app: FastAPI) -> FastAPI:
24
  app.add_middleware(
25
  CORSMiddleware,
 
30
  )
31
  return app
32
 
33
+
34
  def init_database(app: FastAPI) -> FastAPI:
35
  return app
36
 
37
 
38
  def register_routers(app: FastAPI) -> FastAPI:
39
  app.include_router(user.router)
40
+ app.include_router(category.router)
41
+ app.include_router(book.router)
42
+ app.include_router(book_collection.router)
43
+ app.include_router(bot_general.router)
44
+ app.include_router(bot_specific.router)
45
+ app.include_router(bot_one.router)
46
+ app.include_router(trial.router)
47
  app.include_router(role.router)
48
  app.include_router(health.router)
49
 
 
52
 
53
  def init_app() -> FastAPI:
54
  app: FastAPI = pipe(
55
+ create_instance(),
56
+ add_middleware,
57
+ init_database,
58
+ register_events,
59
+ register_routers,
60
  )
61
  return app
62
 
63
+
64
  app = init_app()
config.py CHANGED
@@ -9,6 +9,7 @@ class MysqlConfig(BaseSettings):
9
  DB_USERNAME: str = ""
10
  DB_PASSWORD: str = ""
11
  DB_NAME: str = ""
 
12
 
13
  class Config:
14
  env_file = ".env"
 
9
  DB_USERNAME: str = ""
10
  DB_PASSWORD: str = ""
11
  DB_NAME: str = ""
12
+ DB_URI_SQL_ALCHEMY: str = ""
13
 
14
  class Config:
15
  env_file = ".env"
controller/__init__.py ADDED
File without changes
controller/book_collection_controller.py ADDED
File without changes
controller/book_controller.py ADDED
File without changes
controller/bot_general_controller.py ADDED
File without changes
controller/bot_one_controller.py ADDED
File without changes
controller/bot_specific_controller.py ADDED
File without changes
controller/category_controller.py ADDED
File without changes
controller/user_controller.py ADDED
File without changes
core/chat/chatstore.py CHANGED
@@ -1,10 +1,10 @@
1
  import redis
2
  import os
3
  import json
4
- from fastapi import HTTPException
5
- from uuid import uuid4
6
  from typing import Optional, List
7
  from llama_index.storage.chat_store.redis import RedisChatStore
 
8
  from llama_index.core.memory import ChatMemoryBuffer
9
  from service.dto import ChatMessage
10
 
@@ -16,73 +16,88 @@ class ChatStore:
16
  port=10365,
17
  password=os.environ.get("REDIS_PASSWORD"),
18
  )
 
 
 
 
 
19
 
20
- def generate_uuid(use_hex=False):
21
- if use_hex:
22
- return str(uuid4().hex)
23
- else:
24
- return str(uuid4())
25
-
26
- def initialize_memory_bot(self, session_id=None):
27
- if session_id is None:
28
- session_id = self.generate_uuid()
29
- # chat_store = SimpleChatStore()
30
  chat_store = RedisChatStore(
31
- redis_client=self.redis_client
32
- ) # Need to be configured
33
-
34
- memory = ChatMemoryBuffer.from_defaults(
35
- token_limit=3000, chat_store=chat_store, chat_store_key=session_id
36
  )
37
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  return memory
39
-
40
  def get_messages(self, session_id: str) -> List[dict]:
41
  """Get messages for a session_id."""
42
  items = self.redis_client.lrange(session_id, 0, -1)
43
  if len(items) == 0:
44
  return []
45
-
46
  # Decode and parse each item into a dictionary
47
  return [json.loads(m.decode("utf-8")) for m in items]
48
 
49
  def delete_last_message(self, session_id: str) -> Optional[ChatMessage]:
50
  """Delete last message for a session_id."""
51
  return self.redis_client.rpop(session_id)
52
-
53
- def delete_messages(self, key: str) -> Optional[List[ChatMessage]]:
54
- """Delete messages for a key."""
55
- self.redis_client.delete(key)
 
 
56
  return None
57
-
58
  def clean_message(self, session_id: str) -> Optional[ChatMessage]:
59
  """Delete specific message for a session_id."""
60
  current_list = self.redis_client.lrange(session_id, 0, -1)
61
-
62
  indices_to_delete = []
63
  for index, item in enumerate(current_list):
64
  data = json.loads(item) # Parse JSON string to dict
65
 
66
  # Logic to determine if item should be removed
67
- if (data.get("role") == "assistant" and data.get("content") is None) or (data.get("role") == "tool"):
 
 
68
  indices_to_delete.append(index)
69
 
70
  # Remove elements by their indices in reverse order
71
  for index in reversed(indices_to_delete):
72
- self.redis_client.lrem(session_id, 1, current_list[index]) # Remove the element from the list in Redis
 
 
73
 
74
  def get_keys(self) -> List[str]:
75
  """Get all keys."""
76
- try :
77
  print(self.redis_client.keys("*"))
78
  return [key.decode("utf-8") for key in self.redis_client.keys("*")]
79
-
80
  except Exception as e:
81
- # Log the error and raise HTTPException for FastAPI
82
  print(f"An error occurred in update data.: {e}")
83
- raise HTTPException(
84
- status_code=400, detail="the error when get keys"
85
- )
86
 
87
  def add_message(self, session_id: str, message: ChatMessage) -> None:
88
  """Add a message for a session_id."""
@@ -90,4 +105,57 @@ class ChatStore:
90
  self.redis_client.rpush(session_id, item)
91
 
92
  def _message_to_dict(self, message: ChatMessage) -> dict:
93
- return message.model_dump()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import redis
2
  import os
3
  import json
4
+ from fastapi.responses import JSONResponse
 
5
  from typing import Optional, List
6
  from llama_index.storage.chat_store.redis import RedisChatStore
7
+ from pymongo.mongo_client import MongoClient
8
  from llama_index.core.memory import ChatMemoryBuffer
9
  from service.dto import ChatMessage
10
 
 
16
  port=10365,
17
  password=os.environ.get("REDIS_PASSWORD"),
18
  )
19
+
20
+ uri = os.getenv("MONGO_URI")
21
+ self.client = MongoClient(uri)
22
+
23
+ def initialize_memory_bot(self, session_id):
24
 
 
 
 
 
 
 
 
 
 
 
25
  chat_store = RedisChatStore(
26
+ redis_client=self.redis_client, ttl=86400 # Time-to-live set for 1 hour
 
 
 
 
27
  )
28
 
29
+ db = self.client["bot_database"]
30
+
31
+ if (
32
+ self.redis_client.exists(session_id)
33
+ or session_id in db.list_collection_names()
34
+ ):
35
+ if session_id not in self.redis_client.keys():
36
+ self.add_chat_history_to_redis(
37
+ session_id
38
+ ) # Add chat history to Redis if not found
39
+ # Create memory buffer with chat store and session key
40
+ memory = ChatMemoryBuffer.from_defaults(
41
+ token_limit=3000, chat_store=chat_store, chat_store_key=session_id
42
+ )
43
+ else:
44
+ # Handle the case where the session doesn't exist
45
+ memory = ChatMemoryBuffer.from_defaults(
46
+ token_limit=3000, chat_store=chat_store, chat_store_key=session_id
47
+ )
48
+
49
  return memory
50
+
51
  def get_messages(self, session_id: str) -> List[dict]:
52
  """Get messages for a session_id."""
53
  items = self.redis_client.lrange(session_id, 0, -1)
54
  if len(items) == 0:
55
  return []
56
+
57
  # Decode and parse each item into a dictionary
58
  return [json.loads(m.decode("utf-8")) for m in items]
59
 
60
  def delete_last_message(self, session_id: str) -> Optional[ChatMessage]:
61
  """Delete last message for a session_id."""
62
  return self.redis_client.rpop(session_id)
63
+
64
+ def delete_messages(self, session_id: str) -> Optional[List[ChatMessage]]:
65
+ """Delete messages for a session_id."""
66
+ self.redis_client.delete(session_id)
67
+ db = self.client["bot_database"]
68
+ db.session_id.drop()
69
  return None
70
+
71
  def clean_message(self, session_id: str) -> Optional[ChatMessage]:
72
  """Delete specific message for a session_id."""
73
  current_list = self.redis_client.lrange(session_id, 0, -1)
74
+
75
  indices_to_delete = []
76
  for index, item in enumerate(current_list):
77
  data = json.loads(item) # Parse JSON string to dict
78
 
79
  # Logic to determine if item should be removed
80
+ if (data.get("role") == "assistant" and data.get("content") is None) or (
81
+ data.get("role") == "tool"
82
+ ):
83
  indices_to_delete.append(index)
84
 
85
  # Remove elements by their indices in reverse order
86
  for index in reversed(indices_to_delete):
87
+ self.redis_client.lrem(
88
+ session_id, 1, current_list[index]
89
+ ) # Remove the element from the list in Redis
90
 
91
  def get_keys(self) -> List[str]:
92
  """Get all keys."""
93
+ try:
94
  print(self.redis_client.keys("*"))
95
  return [key.decode("utf-8") for key in self.redis_client.keys("*")]
96
+
97
  except Exception as e:
98
+ # Log the error and return JSONResponse for FastAPI
99
  print(f"An error occurred in update data.: {e}")
100
+ return JSONResponse(status_code=400, content="the error when get keys")
 
 
101
 
102
  def add_message(self, session_id: str, message: ChatMessage) -> None:
103
  """Add a message for a session_id."""
 
105
  self.redis_client.rpush(session_id, item)
106
 
107
  def _message_to_dict(self, message: ChatMessage) -> dict:
108
+ return message.model_dump()
109
+
110
+ def add_chat_history_to_redis(self, session_id: str) -> None:
111
+ """Fetch chat history from MongoDB and add it to Redis."""
112
+ db = self.client["bot_database"]
113
+ collection = db[session_id]
114
+
115
+ try:
116
+ chat_history = collection.find()
117
+ chat_history_list = [
118
+ {
119
+ key: message[key]
120
+ for key in message
121
+ if key not in ["_id", "timestamp"] and message[key] is not None
122
+ }
123
+ for message in chat_history
124
+ if message is not None
125
+ ]
126
+
127
+ for message in chat_history_list:
128
+ # Convert MongoDB document to the format you need
129
+ item = json.dumps(
130
+ self._message_to_dict(ChatMessage(**message))
131
+ ) # Convert message to dict
132
+ # Push to Redis
133
+ self.redis_client.rpush(session_id, item)
134
+ self.redis_client.expire(session_id, time=86400)
135
+
136
+ except Exception as e:
137
+ return JSONResponse(status_code=500, content="Add Database Error")
138
+
139
+ def get_all_messages_mongodb(self, session_id):
140
+ """Get all messages for a session_id from MongoDB."""
141
+ try:
142
+ db = self.client["bot_database"]
143
+ collection = db[session_id]
144
+
145
+ # Retrieve all documents from the collection
146
+ documents = collection.find()
147
+
148
+ # Convert the cursor to a list and exclude the _id field
149
+ documents_list = [
150
+ {key: doc[key] for key in doc if key !="_id" and doc[key] is not None}
151
+ for doc in documents
152
+ ]
153
+
154
+ # Print the list of documents without the _id field
155
+ print(documents_list) # Optional: If you want to see the output
156
+
157
+ return documents_list
158
+
159
+ except Exception as e:
160
+ print(f"An error occurred while retrieving messages: {e}")
161
+ return JSONResponse(status_code=500, content=f"An error occurred while retrieving messages: {e}")
core/chat/engine.py CHANGED
@@ -1,26 +1,19 @@
1
- from typing import Optional, List
2
  from llama_index.core.vector_stores import (
3
  MetadataFilter,
4
  MetadataFilters,
5
- FilterCondition,
6
  )
7
 
8
- from llama_index.core.memory import ChatMemoryBuffer
9
  from llama_index.core.tools import QueryEngineTool, ToolMetadata
10
  from llama_index.agent.openai import OpenAIAgent
11
  from llama_index.llms.openai import OpenAI
12
- from llama_index.storage.chat_store.redis import RedisChatStore
13
- from llama_index.core.memory import ChatMemoryBuffer
14
  from llama_index.core.query_engine import CitationQueryEngine
15
  from llama_index.core import Settings
16
  from core.chat.chatstore import ChatStore
17
 
18
- from service.dto import ChatMessage
19
  from config import GPTBOT_CONFIG
20
- from core.prompt import SYSTEM_BOT_TEMPLATE
21
- import redis
22
- import os
23
- import json
24
 
25
 
26
  class Engine:
@@ -33,48 +26,38 @@ class Engine:
33
  )
34
 
35
  self.chat_store = ChatStore()
36
-
37
  Settings.llm = self.llm
38
 
39
- def _build_description_bot(self, title, category):
40
- try:
41
- prompt = f"Write a detailed description for an OpenAI agent with the title '{title}' and categorized under '{category}'."
42
- description = self.llm.complete(prompt)
43
-
44
- return description
45
-
46
- except Exception as e:
47
- return f"Error generating description: {str(e)}"
48
 
49
- def get_citation_engine(self, title, category, index):
50
- filters = MetadataFilters(
51
- filters=[
52
- MetadataFilter(key="title", value=title),
53
- MetadataFilter(key="category", value=category),
54
- ],
55
- condition=FilterCondition.AND,
56
- )
57
 
58
  # Create the QueryEngineTool with the index and filters
59
  kwargs = {"similarity_top_k": 5, "filters": filters}
60
 
61
  retriever = index.as_retriever(**kwargs)
62
 
63
- citation_engine = CitationQueryEngine(retriever=retriever)
64
 
65
- return citation_engine
66
 
67
- def get_chat_engine(
68
- self, session_id, index, title=None, category=None, type="general"
69
- ):
70
  # Create the QueryEngineTool based on the type
71
- if type == "general":
72
  # query_engine = index.as_query_engine(similarity_top_k=3)
73
  citation_engine = CitationQueryEngine.from_args(index, similarity_top_k=5)
74
  description = "A book containing information about medicine"
75
  else:
76
- citation_engine = self.get_citation_engine(title, category, index)
77
- description = self._build_description_bot()
78
 
79
  metadata = ToolMetadata(name="bot-belajar", description=description)
80
  print(metadata)
@@ -85,12 +68,17 @@ class Engine:
85
  print(vector_query_engine)
86
 
87
  # Initialize the OpenAI agent with the tools
 
 
 
 
 
 
88
  chat_engine = OpenAIAgent.from_tools(
89
  tools=[vector_query_engine],
90
  llm=self.llm,
91
  memory=self.chat_store.initialize_memory_bot(session_id),
92
- # memory = self.initialize_memory_bot(session_id),
93
- system_prompt=SYSTEM_BOT_TEMPLATE,
94
  )
95
 
96
- return chat_engine
 
1
+ from typing import List
2
  from llama_index.core.vector_stores import (
3
  MetadataFilter,
4
  MetadataFilters,
 
5
  )
6
 
 
7
  from llama_index.core.tools import QueryEngineTool, ToolMetadata
8
  from llama_index.agent.openai import OpenAIAgent
9
  from llama_index.llms.openai import OpenAI
 
 
10
  from llama_index.core.query_engine import CitationQueryEngine
11
  from llama_index.core import Settings
12
  from core.chat.chatstore import ChatStore
13
 
 
14
  from config import GPTBOT_CONFIG
15
+ from core.prompt import SYSTEM_BOT_TEMPLATE, ADDITIONAL_INFORMATIONS
16
+ from core.parser import join_list
 
 
17
 
18
 
19
  class Engine:
 
26
  )
27
 
28
  self.chat_store = ChatStore()
 
29
  Settings.llm = self.llm
30
 
31
+ def get_citation_engine(self, titles:List, index):
32
+ filters = [
33
+ MetadataFilter(
34
+ key="title",
35
+ value=title,
36
+ operator="==",
37
+ )
38
+ for title in titles
39
+ ]
40
 
41
+ filters = MetadataFilters(filters=filters, condition="or")
 
 
 
 
 
 
 
42
 
43
  # Create the QueryEngineTool with the index and filters
44
  kwargs = {"similarity_top_k": 5, "filters": filters}
45
 
46
  retriever = index.as_retriever(**kwargs)
47
 
48
+ # citation_engine = CitationQueryEngine(retriever=retriever)
49
 
50
+ return CitationQueryEngine.from_args(index, retriever=retriever)
51
 
52
+ def get_chat_engine(self, session_id, index, titles=None, type_bot="general"):
 
 
53
  # Create the QueryEngineTool based on the type
54
+ if type_bot == "general":
55
  # query_engine = index.as_query_engine(similarity_top_k=3)
56
  citation_engine = CitationQueryEngine.from_args(index, similarity_top_k=5)
57
  description = "A book containing information about medicine"
58
  else:
59
+ citation_engine = self.get_citation_engine(titles, index)
60
+ description = "A book containing information about medicine"
61
 
62
  metadata = ToolMetadata(name="bot-belajar", description=description)
63
  print(metadata)
 
68
  print(vector_query_engine)
69
 
70
  # Initialize the OpenAI agent with the tools
71
+
72
+ if type_bot == "general":
73
+ system_prompt = SYSTEM_BOT_TEMPLATE.format(additional_information="")
74
+ else:
75
+ additional_information = ADDITIONAL_INFORMATIONS.format(titles=join_list(titles))
76
+ system_prompt = SYSTEM_BOT_TEMPLATE.format(additional_information=additional_information)
77
  chat_engine = OpenAIAgent.from_tools(
78
  tools=[vector_query_engine],
79
  llm=self.llm,
80
  memory=self.chat_store.initialize_memory_bot(session_id),
81
+ system_prompt=system_prompt,
 
82
  )
83
 
84
+ return chat_engine
core/parser.py CHANGED
@@ -90,3 +90,13 @@ def seperate_to_list(text):
90
  final_output.extend([part.strip() for part in split_line if part.strip()])
91
 
92
  return final_output
 
 
 
 
 
 
 
 
 
 
 
90
  final_output.extend([part.strip() for part in split_line if part.strip()])
91
 
92
  return final_output
93
+
94
+ def join_list(items):
95
+ if not items:
96
+ return ""
97
+ elif len(items) == 1:
98
+ return items[0]
99
+ elif len(items) == 2:
100
+ return f"{items[0]} and {items[1]}"
101
+ else:
102
+ return ", ".join(items[:-1]) + " and " + items[-1]
core/prompt.py CHANGED
@@ -1,17 +1,21 @@
1
  SYSTEM_BOT_TEMPLATE = """
2
- Kamu adalah Medbot yang gunakan tool kamu untuk menjawab pertanyaan tentang kedokteran. Tugasmu adalah memberikan jawaban yang informatif dan akurat berdasarkan tools yang tersediaserta selalu cantumkan kutipan dari teks yang anda kutip. Jika tidak ada jawaban melalui alat yang digunakan, carilah informasi lebih lanjut dengan menggunakan alat. Jika setelah itu tidak ada informasi yang ditemukan, katakan bahwa kamu tidak mengetahuinya.
3
 
4
  **Instruksi**:
5
 
6
- 1. **Jawaban Berdasarkan Tools**: Jika pengguna bertanya tentang topik kedokteran, gunakanlah tools yang tersedia untuk memberikan jawaban. Pastikan jawabanmu relevan dan sesuai dengan informasi dari tools tersebut.
7
 
8
  2. **Referensi dan Kutipan**: Jangan menghapus sumber kutipan dari teks yang diberikan. Contohnya, jika teksnya adalah "Ilmu kedokteran sangat dibutuhkan [2]", pastikan untuk menyertakan kutipan sumbernya yaitu [2] dalam jawabanmu.
9
 
10
  3. **Ketika Tidak Tahu Jawaban**: Jika pertanyaan pengguna tidak dapat dijawab dengan menggunakan tools ini, sampaikan dengan sopan bahwa kamu tidak memiliki jawaban untuk pertanyaan tersebut. Arahkan pengguna untuk mencari informasi lebih lanjut atau bertanya pada ahli di bidang kedokteran.
11
 
12
- 4. **Gaya Jawaban**: Berikan jawaban dengan gaya yang ramah dan profesional. Hindari penggunaan poin-poin, dan sampaikan informasi secara naratif agar lebih mudah dipahami. Gunakan kata 'dok' atau 'dokter' untuk merujuk pada dokter, dan hindari kesan monoton dengan menambahkan emotikon jika sesuai.
13
 
14
- 5. **Penutup**: Akhiri komunikasi dengan kalimat yang friendly, seperti "Semoga informasi ini bermanfaat, dok ✨" atau "Jika ada pertanyaan lain, jangan ragu untuk bertanya ya dok 😊"
 
 
 
 
15
  """
16
 
17
  SYSTEM_TOPIC_TEMPLATE = """
@@ -100,7 +104,7 @@ Your task is to extract and organize metadata for the {class_name}. Follow the i
100
 
101
  2. **Extract the Key Subtopic (if applicable):**
102
  - **Goal:** Determine the most relevant supporting element related to the main topic.
103
- - **How:** Identify a sub-element or detail that provides additional depth or clarification to the main topic.
104
  - **Tip:** Ensure the subtopic directly supports or elaborates on the main topic.
105
 
106
  3. **Handle Cases Without a Clear Subtopic:**
 
1
  SYSTEM_BOT_TEMPLATE = """
2
+ Kamu adalah Medbot yang selalu menggunakan tools kamu untuk menjawab pertanyaan tentang kedokteran. Tugasmu adalah memberikan jawaban yang informatif dan akurat berdasarkan tools yang tersedia. {additional_information} Jika setelah itu tidak ada informasi yang ditemukan, katakan bahwa kamu tidak mengetahuinya dan berikan informasi dari apa yang kamu ketahui kemudian arahkan pengguna untuk bertanya ke dokter yang lebih ahli.
3
 
4
  **Instruksi**:
5
 
6
+ 1. **Jawaban Berdasarkan Tools**: Jika pengguna bertanya tentang topik kedokteran, gunakanlah tools yang tersedia untuk memberikan jawaban. Pastikan jawabanmu relevan dan sesuai dengan informasi dari tools tersebut. Jelaskan informasi dengan content dan lengkap.
7
 
8
  2. **Referensi dan Kutipan**: Jangan menghapus sumber kutipan dari teks yang diberikan. Contohnya, jika teksnya adalah "Ilmu kedokteran sangat dibutuhkan [2]", pastikan untuk menyertakan kutipan sumbernya yaitu [2] dalam jawabanmu.
9
 
10
  3. **Ketika Tidak Tahu Jawaban**: Jika pertanyaan pengguna tidak dapat dijawab dengan menggunakan tools ini, sampaikan dengan sopan bahwa kamu tidak memiliki jawaban untuk pertanyaan tersebut. Arahkan pengguna untuk mencari informasi lebih lanjut atau bertanya pada ahli di bidang kedokteran.
11
 
12
+ 4. **Gaya Jawaban**: Berikan jawaban dengan gaya yang ramah dan profesional. Sampaikan informasi secara naratif agar lebih mudah dipahami. Boleh menggunakan point point dan uraiannya agar bisa menjelaskan informasi yang kompleks sehingga mudah dipahami. Gunakan kata 'dok' atau 'dokter' untuk merujuk pada dokter, dan hindari kesan monoton dengan menambahkan emotikon jika sesuai seperti 😁, 😊, 🙌, 😉, 😀, 🤔, 😇.
13
 
14
+ 5. **Penutup**: Akhiri komunikasi dengan kalimat yang friendly, seperti "Semoga informasi ini bermanfaat, dok ✨" atau "Jika ada pertanyaan lain, jangan ragu untuk bertanya ya dok 😊" dan sebagainya.
15
+ """
16
+
17
+ ADDITIONAL_INFORMATIONS = """
18
+ Kemudian, kamu menjawab pertanyan user dari buku {titles}, jadi jika user bertaya kamu pastikan akan mengacu buku tersebut yang didapatkan dari tools dari yang kamu punya.
19
  """
20
 
21
  SYSTEM_TOPIC_TEMPLATE = """
 
104
 
105
  2. **Extract the Key Subtopic (if applicable):**
106
  - **Goal:** Determine the most relevant supporting element related to the main topic.
107
+ - **How:** Identify a sub-element or content that provides additional depth or clarification to the main topic.
108
  - **Tip:** Ensure the subtopic directly supports or elaborates on the main topic.
109
 
110
  3. **Handle Cases Without a Clear Subtopic:**
core/summarization/summarizer.py CHANGED
@@ -3,7 +3,7 @@ import os
3
  import base64
4
  import fitz
5
 
6
- from fastapi import HTTPException
7
  from llama_index.core.vector_stores import (
8
  MetadataFilter,
9
  MetadataFilters,
@@ -39,7 +39,7 @@ class SummarizeGenerator:
39
  print(content_table)
40
  # content_table = fitz.open(topics_image)
41
  except Exception as e:
42
- raise HTTPException(status_code=400, detail=f"Error opening PDF file: {e}")
43
 
44
  # Initialize a list to collect base64 encoded images
45
  pix_encoded_combined = []
@@ -57,7 +57,7 @@ class SummarizeGenerator:
57
  continue # Skip to the next page if there's an error
58
 
59
  if not pix_encoded_combined:
60
- raise HTTPException(status_code=404, detail="No images found in the PDF")
61
 
62
  return pix_encoded_combined
63
 
@@ -102,7 +102,7 @@ class SummarizeGenerator:
102
  return str(refined_extractor_output), extractor_dics
103
 
104
  except Exception as e:
105
- raise HTTPException(status_code=500, detail=f"An error occurred: {e}")
106
 
107
  def _extract_image_as_base64(self, page):
108
  try:
@@ -110,7 +110,7 @@ class SummarizeGenerator:
110
  pix_bytes = pix.tobytes()
111
  return base64.b64encode(pix_bytes).decode("utf-8")
112
  except Exception as e:
113
- raise HTTPException(status_code=500, detail=f"Error extracting image: {e}")
114
 
115
  def index_summarizer_engine(self, topic, subtopic, index):
116
  filters = MetadataFilters(
 
3
  import base64
4
  import fitz
5
 
6
+ from fastapi.responses import JSONResponse
7
  from llama_index.core.vector_stores import (
8
  MetadataFilter,
9
  MetadataFilters,
 
39
  print(content_table)
40
  # content_table = fitz.open(topics_image)
41
  except Exception as e:
42
+ return JSONResponse(status_code=400, content=f"Error opening PDF file: {e}")
43
 
44
  # Initialize a list to collect base64 encoded images
45
  pix_encoded_combined = []
 
57
  continue # Skip to the next page if there's an error
58
 
59
  if not pix_encoded_combined:
60
+ return JSONResponse(status_code=404, content="No images found in the PDF")
61
 
62
  return pix_encoded_combined
63
 
 
102
  return str(refined_extractor_output), extractor_dics
103
 
104
  except Exception as e:
105
+ return JSONResponse(status_code=500, content=f"An error occurred: {e}")
106
 
107
  def _extract_image_as_base64(self, page):
108
  try:
 
110
  pix_bytes = pix.tobytes()
111
  return base64.b64encode(pix_bytes).decode("utf-8")
112
  except Exception as e:
113
+ return JSONResponse(status_code=500, content=f"Error extracting image: {e}")
114
 
115
  def index_summarizer_engine(self, topic, subtopic, index):
116
  filters = MetadataFilters(
db/database.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import OperationalError
from config import MYSQL_CONFIG
from fastapi import HTTPException
import os
import base64


# Connection string comes from project config (MySQL via SQLAlchemy).
SQLALCHEMY_DATABASE_URL = MYSQL_CONFIG.DB_URI_SQL_ALCHEMY

# Get the base64 encoded certificate from the environment variable
ca_cert_base64 = os.getenv("CA_CERT_BASE64")

# Decode the base64 content
if ca_cert_base64:
    ca_cert_content = base64.b64decode(ca_cert_base64).decode("utf-8")

    # Write the decoded content to a temporary .pem file
    # NOTE(review): hard-coded /tmp path; fine for a single-container
    # deployment, but consider tempfile for multi-process safety.
    with open("/tmp/ca.pem", "w") as f:
        f.write(ca_cert_content)

    ca_cert_path = "/tmp/ca.pem"
else:
    # Fail fast at import time: the app cannot reach the DB without the cert.
    raise ValueError("CA_CERT_BASE64 environment variable is not set")

# Use the decoded CA certificate in the SQLAlchemy engine
engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={
        "ssl": {
            # NOTE(review): "sslmode" is a PostgreSQL/libpq parameter; MySQL
            # drivers generally ignore unknown ssl keys — confirm against the
            # driver actually in use (PyMySQL / mysqlclient).
            "sslmode": "REQUIRED",
            "ca": ca_cert_path,  # Path to the temporary CA certificate
            # Add other SSL options as needed
        }
    },
)

# Session factory: explicit commit/flush, bound to the TLS-enabled engine.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
40
+
41
+
42
def get_db():
    """FastAPI dependency that yields a SQLAlchemy session.

    Yields:
        A ``SessionLocal`` session scoped to one request.

    Raises:
        HTTPException: 400 on connection errors or unexpected failures,
            401 when the underlying error looks authentication-related.

    The session is now closed in ``finally`` in every case — previously it
    leaked back to the pool unreturned whenever an exception escaped.
    """
    db = SessionLocal()
    try:
        yield db
    except OperationalError as e:
        # Connection-level failure (server down, TLS problem, timeout).
        print(f"An error occurred in get database sql alchemy.: {e}")
        raise HTTPException(status_code=400, detail="Database connection error")
    except Exception as e:
        # Exceptions raised by the endpoint propagate back into the generator
        # at the `yield` point and land here.
        # Check if it's an authentication-related error
        if "401" in str(e):
            raise HTTPException(status_code=401, detail="Authentication failed")
        else:
            # For any other type of exception, raise a generic 400 error
            print(f"An error occurred: {e}")
            raise HTTPException(status_code=400, detail="An unexpected error occurred")
    finally:
        # Always return the connection to the pool (fixes session leak).
        db.close()
db/delete_data.py CHANGED
@@ -10,7 +10,7 @@ class DeleteDatabase(Repository):
10
  if "id" not in params:
11
  raise ValueError("The 'id' parameter is required.")
12
  query = """
13
- DELETE FROM Metadata
14
  WHERE id = :id
15
  """
16
 
 
10
  if "id" not in params:
11
  raise ValueError("The 'id' parameter is required.")
12
  query = """
13
+ DELETE FROM metadata
14
  WHERE id = :id
15
  """
16
 
db/get_data.py CHANGED
@@ -1,5 +1,6 @@
1
  import logging
2
  from db.repository import Repository, get_db_conn
 
3
 
4
  # Setup logging (configure as needed)
5
  logging.basicConfig(level=logging.INFO)
@@ -23,14 +24,14 @@ class GetDatabase(Repository):
23
  return results if results else None
24
  except Exception as e:
25
  logging.error(f"An error occurred while executing query: {e}")
26
- return None
27
 
28
  async def get_data(self, title):
29
  """
30
  Fetch the first result matching the given title from the metadata table.
31
  """
32
  query = """
33
- SELECT * FROM Metadata
34
  WHERE title = %s
35
  limit 5;
36
  """
@@ -40,21 +41,22 @@ class GetDatabase(Repository):
40
  return results
41
  except Exception as e:
42
  logging.error(f"An error occurred while get data: {e}")
43
- return None
44
 
45
  async def get_all_data(self):
46
  """
47
  Fetch all data from the metadata table.
48
  """
49
  query = """
50
- SELECT * FROM Metadata
51
  """
52
  results = await self.execute_query(query)
 
53
  return results
54
 
55
  async def get_data_by_id(self, id):
56
  query = f"""
57
- SELECT * FROM Metadata WHERE id = :id
58
  """
59
 
60
  param = {"id" : id}
@@ -64,4 +66,4 @@ class GetDatabase(Repository):
64
  return results[0] if results else None
65
  except Exception as e:
66
  print('Error fetching data by ID %s: %s', id, e)
67
- return None
 
1
  import logging
2
  from db.repository import Repository, get_db_conn
3
+ from fastapi.responses import JSONResponse
4
 
5
  # Setup logging (configure as needed)
6
  logging.basicConfig(level=logging.INFO)
 
24
  return results if results else None
25
  except Exception as e:
26
  logging.error(f"An error occurred while executing query: {e}")
27
+ return JSONResponse(status_code=500, content=f"An error occurred while executing query: {e}")
28
 
29
  async def get_data(self, title):
30
  """
31
  Fetch the first result matching the given title from the metadata table.
32
  """
33
  query = """
34
+ SELECT * FROM metadata
35
  WHERE title = %s
36
  limit 5;
37
  """
 
41
  return results
42
  except Exception as e:
43
  logging.error(f"An error occurred while get data: {e}")
44
+ return JSONResponse(status_code=500, content=f"An error occurred while get data: {e}")
45
 
46
  async def get_all_data(self):
47
  """
48
  Fetch all data from the metadata table.
49
  """
50
  query = """
51
+ SELECT * FROM metadata
52
  """
53
  results = await self.execute_query(query)
54
+ print("result", results)
55
  return results
56
 
57
  async def get_data_by_id(self, id):
58
  query = f"""
59
+ SELECT * FROM metadata WHERE id = :id
60
  """
61
 
62
  param = {"id" : id}
 
66
  return results[0] if results else None
67
  except Exception as e:
68
  print('Error fetching data by ID %s: %s', id, e)
69
+ return JSONResponse(status_code=500, content=f"An error while fething data: {e}")
db/models.py ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from typing import Literal
from typing_extensions import Annotated
import sqlalchemy
from sqlalchemy.orm import mapped_column
from sqlalchemy import Integer, String, ForeignKey,func, DateTime, Boolean, LargeBinary  # NOTE(review): func is imported but unused
from sqlalchemy.orm import DeclarativeBase, Mapped
import uuid
import datetime
import pytz

# Set Jakarta timezone
jakarta_tz = pytz.timezone('Asia/Jakarta')

def get_jakarta_time():
    # Timezone-aware "now" in Asia/Jakarta, used as the column default below.
    # NOTE(review): the timestamp columns are not declared timezone=True, so
    # the tz info may be dropped on storage — confirm against the target DB.
    return datetime.datetime.now(jakarta_tz)

# Use the timezone-aware function in SQLAlchemy annotations
# Reusable annotated type: set once at INSERT time.
timestamp_current = Annotated[
    datetime.datetime,
    mapped_column(nullable=False, default=get_jakarta_time) # Use default instead of server_default
]

# Reusable annotated type: set at INSERT and refreshed on every UPDATE.
timestamp_update = Annotated[
    datetime.datetime,
    mapped_column(nullable=False, default=get_jakarta_time, onupdate=get_jakarta_time) # onupdate uses the Python function
]

# Closed set of chat roles; mapped to a DB enum via Base.type_annotation_map.
message_role = Literal["user", "assistant"]

class Base(DeclarativeBase):
    # Any model attribute annotated Mapped[message_role] becomes a DB-level
    # ENUM('user', 'assistant') named "message_role".
    type_annotation_map = {
        message_role: sqlalchemy.Enum("user", "assistant", name="message_role"),
    }
34
+
35
class User(Base):
    """Application account; one row per registered user."""
    __tablename__ = "user"

    id = mapped_column(Integer, primary_key=True)
    name = mapped_column(String(100), nullable=False)
    username = mapped_column(String(100), unique=True, nullable=False)
    # NOTE(review): role_id here duplicates the user_role association table
    # below — confirm which one is authoritative.
    role_id = mapped_column(Integer, ForeignKey("role.id"))
    email = mapped_column(String(100), unique=True, nullable=False)
    # Stores a hash, never the plaintext password.
    password_hash = mapped_column(String(100), nullable=False)
    created_at: Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
46
+
47
class Feedback(Base):
    """User-submitted rating and free-text comment."""
    __tablename__ = "feedback"

    id = mapped_column(Integer, primary_key=True)
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    # Numeric rating; allowed range not enforced here — validate upstream.
    rating = mapped_column(Integer)
    comment = mapped_column(String(1000))
    created_at : Mapped[timestamp_current]
55
+
56
class Role(Base):
    """Authorization role (e.g. admin/user); referenced by user and user_role."""
    __tablename__ = "role"

    id = mapped_column(Integer, primary_key=True)
    role_name = mapped_column(String(200), nullable=False)
    description = mapped_column(String(200))
62
+
63
class User_Role(Base):
    """Many-to-many association between users and roles."""
    __tablename__ = "user_role"

    id = mapped_column(Integer, primary_key=True)
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    role_id = mapped_column(Integer, ForeignKey("role.id"))
69
+
70
class Bot(Base):
    """A chatbot instance owned by a user."""
    __tablename__ = "bot"

    id = mapped_column(Integer, primary_key=True)
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    bot_name = mapped_column(String(200), nullable=False)
    created_at : Mapped[timestamp_current]
77
+
78
class Session(Base):
    """Chat session tying a user to a bot; PK is a UUID string."""
    __tablename__ = "session"

    # UUID4 stored as a 36-char string (generated client-side by default).
    id = mapped_column(String(36), primary_key=True, index=True, default=lambda: str(uuid.uuid4())) # Store as string
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    bot_id = mapped_column(Integer, ForeignKey("bot.id"))
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
86
+
87
class Message(Base):
    """One chat message within a session."""
    __tablename__ = "message"

    id = mapped_column(Integer, primary_key=True)
    session_id = mapped_column(String(36), ForeignKey("session.id"), nullable=False) # Store as string
    # Mapped to ENUM('user','assistant') via Base.type_annotation_map.
    role : Mapped[message_role]
    # NOTE(review): "goal" looks like message intent/summary, not the text
    # body — confirm with the writers of this column.
    goal = mapped_column(String(200))
    created_at : Mapped[timestamp_current]
95
+
96
class Category(Base):
    """Book/content category referenced by metadata.category_id."""
    __tablename__ = "category"

    id = mapped_column(Integer, primary_key=True)
    category = mapped_column(String(200))
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
103
+
104
class Metadata(Base):
    """Bibliographic record for a book/document (title, author, year, ...)."""
    __tablename__ = "metadata"

    id = mapped_column(Integer, primary_key=True)
    title = mapped_column(String(200))
    # image_data = mapped_column(LargeBinary, nullable=True)
    category_id = mapped_column(Integer, ForeignKey("category.id"))
    author = mapped_column(String(200))
    year = mapped_column(Integer)
    publisher = mapped_column(String(100))
    # Raw image bytes for the cover thumbnail; optional.
    thumbnail = mapped_column(LargeBinary, nullable=True)
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
117
+
118
+
119
class Bot_Meta(Base):
    """Many-to-many association: which metadata records a bot can use."""
    __tablename__ = "bot_meta"

    id = mapped_column(Integer, primary_key=True)
    bot_id = mapped_column(Integer, ForeignKey("bot.id"))
    metadata_id = mapped_column(Integer, ForeignKey("metadata.id"))
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
127
+
128
class User_Meta(Base):
    """Many-to-many association: which metadata records a user owns/uses."""
    __tablename__ = "user_meta"

    id = mapped_column(Integer, primary_key=True)
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    metadata_id = mapped_column(Integer, ForeignKey("metadata.id"))
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
136
+
137
class Planning(Base):
    """Subscription/trial plan window attached to a trials record."""
    __tablename__="planning"

    id = mapped_column(Integer, primary_key=True)
    trials_id = mapped_column(Integer, ForeignKey("trials.id"))
    planning_name = mapped_column(String(200), nullable=False)
    duration = mapped_column(Integer, nullable=False) # Duration in months
    start_date = mapped_column(DateTime, nullable=False) # Start date of the planning
    end_date = mapped_column(DateTime, nullable=False) # End date of the planning
    # Whether this plan is currently active; nullable (no default set).
    is_activated = mapped_column(Boolean)
    created_at : Mapped[timestamp_current] # Automatically sets the current timestamp
148
+
149
class Trials(Base):
    """Token budget tracking for a trial: consumed vs. allotted."""
    __tablename__ = "trials"

    id = mapped_column(Integer, primary_key=True)
    token_used = mapped_column(Integer, nullable=False) # Adjust length as needed
    token_planned = mapped_column(Integer, nullable=False)
155
+
156
+
157
class Session_Publisher(Base):
    """Session scoped to a single metadata record (publisher/book chat); PK is a UUID string."""
    __tablename__ = "session_publisher"

    # UUID4 stored as a 36-char string (generated client-side by default).
    id = mapped_column(String(36), primary_key=True, index=True, default=lambda: str(uuid.uuid4())) # Store as string
    user_id = mapped_column(Integer, ForeignKey("user.id"))
    metadata_id = mapped_column(Integer, ForeignKey("metadata.id"))
    created_at : Mapped[timestamp_current]
    updated_at : Mapped[timestamp_update]
db/save_data.py CHANGED
@@ -3,21 +3,30 @@ import logging
3
  from dotenv import load_dotenv
4
  from db.repository import Repository
5
 
 
6
  load_dotenv()
7
 
8
 
9
  class InsertDatabase(Repository):
10
 
11
  # Example function to insert data asynchronously
12
- async def insert_data(self, params):
13
  # SQL insert query with named placeholders
14
  query = """
15
- INSERT INTO Metadata (title, category, author, year, publisher, createdAt, updatedAt)
16
- VALUES (:title, :category, :author, :year, :publisher, :createdAt, :updatedAt)
17
  """
18
 
19
- reference = self.update_params(params)
 
 
 
 
 
 
 
20
 
 
21
  try:
22
  # Execute the query with the provided values
23
  await self._exec(query, reference)
 
3
  from dotenv import load_dotenv
4
  from db.repository import Repository
5
 
6
+
7
  load_dotenv()
8
 
9
 
10
  class InsertDatabase(Repository):
11
 
12
  # Example function to insert data asynchronously
13
+ async def insert_data(self, params, category_id):
14
  # SQL insert query with named placeholders
15
  query = """
16
+ INSERT INTO metadata (title, category_id, author, year, publisher)
17
+ VALUES (:title, :category_id, :author, :year, :publisher)
18
  """
19
 
20
+ reference = {
21
+ "title": params["title"],
22
+ "category_id": category_id, # directly assign category_id
23
+ "author": params["author"],
24
+ "year": params["year"],
25
+ "publisher": params["publisher"]
26
+ }
27
+
28
 
29
+ print(reference)
30
  try:
31
  # Execute the query with the provided values
32
  await self._exec(query, reference)
db/update_data.py CHANGED
@@ -7,30 +7,29 @@ logging.basicConfig(level=logging.INFO)
7
 
8
  class UpdateDatabase(Repository):
9
  async def update_record(self, reference):
 
10
  if "id" not in reference:
11
  raise ValueError("The 'id' parameter is required.")
12
  query = """
13
- UPDATE Metadata
14
  SET title = :title,
15
- category = :category,
16
  author = :author,
17
  year = :year,
18
- publisher = :publisher,
19
- updatedAt = :updatedAt
20
  WHERE id = :id
21
  """
22
  print(query)
23
 
24
- updated_reference = self.update_params(reference, update=True)
25
- print(updated_reference)
26
 
27
  try:
28
- await self._exec(query, updated_reference)
29
  logging.info(
30
- f"Record with id {updated_reference['id']} updated successfully."
31
  )
32
  except Exception as e:
33
  logging.error(
34
- f"Error updating record with id {updated_reference['id']}: {e}"
35
  )
36
  raise
 
7
 
8
class UpdateDatabase(Repository):
    """Write-side repository for the metadata table."""

    async def update_record(self, reference):
        """Update the metadata row identified by ``reference['id']``.

        Args:
            reference: dict with keys id, title, category_id, author,
                year, publisher — bound to the named query placeholders.

        Raises:
            ValueError: when 'id' is missing from ``reference``.
            Exception: re-raises any database error after logging it.
        """
        # Raw stdout debug prints replaced with leveled, lazy logging.
        logging.debug("Updating metadata record: %s", reference)
        if "id" not in reference:
            raise ValueError("The 'id' parameter is required.")
        query = """
        UPDATE metadata
        SET title = :title,
            category_id = :category_id,
            author = :author,
            year = :year,
            publisher = :publisher
        WHERE id = :id
        """
        try:
            await self._exec(query, reference)
            logging.info(
                f"Record with id {reference['id']} updated successfully."
            )
        except Exception as e:
            logging.error(
                f"Error updating record with id {reference['id']}: {e}"
            )
            raise
helper/bot_function.py ADDED
File without changes