import json
import pytest
from openhands.storage.conversation.file_conversation_store import FileConversationStore
from openhands.storage.data_models.conversation_metadata import ConversationMetadata
from openhands.storage.locations import get_conversation_metadata_filename
from openhands.storage.memory import InMemoryFileStore
@pytest.mark.asyncio
async def test_load_store():
    """Round-trip: metadata saved to the store can be read back unchanged."""
    conversation_store = FileConversationStore(InMemoryFileStore({}))
    metadata = ConversationMetadata(
        conversation_id='some-conversation-id',
        user_id='some-user-id',
        selected_repository='some-repo',
        title="Let's talk about trains",
    )
    await conversation_store.save_metadata(metadata)
    loaded = await conversation_store.get_metadata('some-conversation-id')
    assert loaded == metadata
@pytest.mark.asyncio
async def test_load_int_user_id():
    """A user_id persisted as a numeric string comes back as a string."""
    raw = json.dumps(
        {
            'conversation_id': 'some-conversation-id',
            'user_id': '67890',
            'selected_repository': 'some-repo',
            'title': "Let's talk about trains",
            'created_at': '2025-01-16T19:51:04.886331Z',
        }
    )
    files = {get_conversation_metadata_filename('some-conversation-id'): raw}
    store = FileConversationStore(InMemoryFileStore(files))
    metadata = await store.get_metadata('some-conversation-id')
    assert metadata.user_id == '67890'
@pytest.mark.asyncio
async def test_search_empty():
    """Searching an empty store yields no results and no next page."""
    empty_store = FileConversationStore(InMemoryFileStore({}))
    page = await empty_store.search()
    assert len(page.results) == 0
    assert page.next_page_id is None
@pytest.mark.asyncio
async def test_search_basic():
    """Search returns all conversations ordered by created_at, newest first."""

    def _entry(conv_id, title, created_at):
        # One serialized metadata record keyed by its storage path.
        return get_conversation_metadata_filename(conv_id), json.dumps(
            {
                'conversation_id': conv_id,
                'user_id': '123',
                'selected_repository': 'repo1',
                'title': title,
                'created_at': created_at,
            }
        )

    files = dict(
        [
            _entry('conv1', 'First conversation', '2025-01-16T19:51:04Z'),
            _entry('conv2', 'Second conversation', '2025-01-17T19:51:04Z'),
            _entry('conv3', 'Third conversation', '2025-01-15T19:51:04Z'),
        ]
    )
    store = FileConversationStore(InMemoryFileStore(files))

    page = await store.search()
    assert len(page.results) == 3
    # Newest (conv2) first, oldest (conv3) last.
    assert [c.conversation_id for c in page.results] == ['conv2', 'conv1', 'conv3']
    assert page.next_page_id is None
@pytest.mark.asyncio
async def test_search_pagination():
    """Page through 5 conversations, 2 at a time, newest first."""
    files = {}
    for i in range(1, 6):
        files[get_conversation_metadata_filename(f'conv{i}')] = json.dumps(
            {
                'conversation_id': f'conv{i}',
                'user_id': '123',
                'selected_repository': 'repo1',
                'title': f'ServerConversation {i}',
                'created_at': f'2025-01-{15 + i}T19:51:04Z',
            }
        )
    store = FileConversationStore(InMemoryFileStore(files))

    # First page: the two newest conversations.
    page1 = await store.search(limit=2)
    assert [c.conversation_id for c in page1.results] == ['conv5', 'conv4']
    assert page1.next_page_id is not None

    # Second page continues exactly where the first left off.
    page2 = await store.search(page_id=page1.next_page_id, limit=2)
    assert [c.conversation_id for c in page2.results] == ['conv3', 'conv2']
    assert page2.next_page_id is not None

    # Final page holds the single remaining conversation.
    page3 = await store.search(page_id=page2.next_page_id, limit=2)
    assert [c.conversation_id for c in page3.results] == ['conv1']
    assert page3.next_page_id is None
@pytest.mark.asyncio
async def test_search_with_invalid_conversation():
    """Corrupt metadata files are skipped instead of failing the search."""
    valid = json.dumps(
        {
            'conversation_id': 'conv1',
            'user_id': '123',
            'selected_repository': 'repo1',
            'title': 'Valid conversation',
            'created_at': '2025-01-16T19:51:04Z',
        }
    )
    store = FileConversationStore(
        InMemoryFileStore(
            {
                get_conversation_metadata_filename('conv1'): valid,
                # Unparseable payload that search() should ignore.
                get_conversation_metadata_filename('conv2'): 'invalid json',
            }
        )
    )
    page = await store.search()
    # Only the valid conversation is returned.
    assert len(page.results) == 1
    assert page.results[0].conversation_id == 'conv1'
    assert page.next_page_id is None
@pytest.mark.asyncio
async def test_get_all_metadata():
    """get_all_metadata returns one record per requested id, in request order."""
    files = {}
    for conv_id, title, created_at in (
        ('conv1', 'First conversation', '2025-01-16T19:51:04Z'),
        ('conv2', 'Second conversation', '2025-01-17T19:51:04Z'),
    ):
        files[get_conversation_metadata_filename(conv_id)] = json.dumps(
            {
                'conversation_id': conv_id,
                'user_id': '123',
                'selected_repository': 'repo1',
                'title': title,
                'created_at': created_at,
            }
        )
    store = FileConversationStore(InMemoryFileStore(files))

    metadata = await store.get_all_metadata(['conv1', 'conv2'])
    assert len(metadata) == 2
    assert (metadata[0].conversation_id, metadata[0].title) == (
        'conv1',
        'First conversation',
    )
    assert (metadata[1].conversation_id, metadata[1].title) == (
        'conv2',
        'Second conversation',
    )