code
stringlengths
66
870k
docstring
stringlengths
19
26.7k
func_name
stringlengths
1
138
language
stringclasses
1 value
repo
stringlengths
7
68
path
stringlengths
5
324
url
stringlengths
46
389
license
stringclasses
7 values
def __getitem__(self, name):
    """Resolve ``name`` as a date-arithmetic expression against run_time.

    Raises KeyError when ``name`` is not a parseable date expression.
    """
    parsed = tron_timeutils.DateArithmetic.parse(name, self.job_run.run_time)
    if not parsed:
        raise KeyError(name)
    return parsed
Attempt to parse date arithmetic syntax and apply to run_time.
__getitem__
python
Yelp/paasta
paasta_tools/tron/tron_command_context.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/tron/tron_command_context.py
Apache-2.0
def delta_total_seconds(td):
    """Return the total number of seconds in *td* as a float.

    Historical shim: this reimplemented ``timedelta.total_seconds()`` for
    Python < 2.7. On any modern interpreter the stdlib method computes the
    identical value — (days*86400 + seconds)*10**6 + microseconds, true-divided
    by 10**6 — so delegate to it. Kept (rather than deleted) because callers
    elsewhere still use this name.
    """
    return td.total_seconds()
Equivalent to timedelta.total_seconds() available in Python 2.7.
delta_total_seconds
python
Yelp/paasta
paasta_tools/tron/tron_timeutils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/tron/tron_timeutils.py
Apache-2.0
def macro_timedelta(start_date, years=0, months=0, days=0, hours=0):
    """Build a timedelta that can span years and months.

    ``datetime.timedelta`` has no year/month units, so the year/month portion
    is computed by constructing the shifted calendar date relative to
    *start_date* and differencing; days and hours are added directly.

    NOTE(review): assumes start_date.day exists in the target month (e.g.
    Jan 31 + 1 month would raise ValueError) — preserved from the original.
    """
    delta = datetime.timedelta(days=days, hours=hours)

    # Normalize the month into 1..12, carrying overflow into years.
    month = start_date.month + months
    while month > 12:
        month -= 12
        years += 1
    while month < 1:
        month += 12
        years -= 1

    shifted = datetime.datetime(
        start_date.year + years, month, start_date.day, start_date.hour
    )
    return delta + (shifted - start_date)
Since datetime doesn't provide timedeltas at the year or month level, this function generates timedeltas of the appropriate sizes.
macro_timedelta
python
Yelp/paasta
paasta_tools/tron/tron_timeutils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/tron/tron_timeutils.py
Apache-2.0
def duration(start_time, end_time=None):
    """Return ``end_time - start_time``; *end_time* defaults to now().

    Returns None when *start_time* is falsy (e.g. a run that never started).
    """
    if not start_time:
        return None
    # Preserve truthiness check (not `is None`) from the original contract.
    if not end_time:
        end_time = current_time()
    return end_time - start_time
Get a timedelta between end_time and start_time, where end_time defaults to now().
duration
python
Yelp/paasta
paasta_tools/tron/tron_timeutils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/tron/tron_timeutils.py
Apache-2.0
def parse(cls, date_str, dt=None):
    """Evaluate a date-arithmetic pattern (e.g. ``'shortdate-1'``) against *dt*.

    Supported names: shortdate, year, month, day, hour, unixtime, daynumber.
    An optional signed integer offset is applied in the unit implied by the
    name (days for shortdate/daynumber, seconds for unixtime, etc.).
    Returns None when *date_str* does not match the pattern; *dt* defaults
    to current_time().
    """
    dt = dt or current_time()
    matched = cls.DATE_TYPE_PATTERN.match(date_str)
    if not matched:
        return

    attr, offset_str = matched.groups()
    offset = int(offset_str) if offset_str else 0

    if attr in ("shortdate", "year", "month", "day", "hour"):
        if offset:
            # shortdate arithmetic moves by days; other names move by
            # their own unit (years/months/days/hours).
            unit = "days" if attr == "shortdate" else attr + "s"
            dt += macro_timedelta(dt, **{unit: offset})
        return dt.strftime(cls.DATE_FORMATS[attr])

    if attr == "unixtime":
        return int(to_timestamp(dt)) + offset
    if attr == "daynumber":
        return dt.toordinal() + offset
Parse a date arithmetic pattern (Ex: 'shortdate-1'). Supports date strings: shortdate, year, month, day, unixtime, daynumber. Supports subtraction and addition operations of integers. Time unit is based on date format (Ex: seconds for unixtime, days for day).
parse
python
Yelp/paasta
paasta_tools/tron/tron_timeutils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/tron/tron_timeutils.py
Apache-2.0
async def test_async_ttl_cache_dont_overwrite_new_cache_entry():
    """A cache entry replaced mid-await must not be clobbered.

    While async_ttl_cache is waiting on a previously cached future, another
    coroutine swaps a fresh future into the cache; the decorator must keep
    that fresh entry rather than overwrite it with the stale result.
    """
    continue_event = asyncio.Event()
    update_event = asyncio.Event()
    value_source = iter(range(10))

    # Block until the awaiter has pulled the in-flight future from the
    # cache, then signal cache_updater to swap in the replacement future
    # before this coroutine returns. Because cache_updater runs first,
    # the cached future is replaced before async_ttl_cache decides whether
    # to store this coroutine's result.
    async def range_coroutine():
        await continue_event.wait()
        update_event.set()
        return next(value_source)

    stale_future = asyncio.ensure_future(range_coroutine())
    cache_key = functools._make_key((), {}, typed=False)
    cache = {cache_key: (stale_future, float("Inf"))}
    cached_range_coroutine = async_ttl_cache(cache=cache, ttl=0)(range_coroutine)

    fresh_future = asyncio.ensure_future(range_coroutine())

    async def awaiter():
        continue_event.set()
        await cached_range_coroutine()

    async def cache_updater():
        await update_event.wait()
        cache[cache_key] = (fresh_future, float("Inf"))

    await asyncio.gather(awaiter(), cache_updater())
    assert cache[cache_key] == (fresh_future, float("Inf"))
Make sure that we don't overwrite a new cache entry that was placed while we were waiting to handle the result of a previously cached future
test_async_ttl_cache_dont_overwrite_new_cache_entry
python
Yelp/paasta
tests/test_async_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_async_utils.py
Apache-2.0
async def test_async_ttl_cache_recover_if_cache_entry_removed():
    """Handle an exception in a cached future whose cache entry is gone.

    Two coroutines await the same failing cached future; the first one to
    run deletes the cache entry, and the second must not raise KeyError
    while cleaning up.
    """
    continue_event = asyncio.Event()
    awaiting_counter = DataHolder(value=0)

    class TestException(Exception):
        pass

    async def failing_coroutine():
        await continue_event.wait()
        raise TestException

    inflight_future = asyncio.ensure_future(failing_coroutine())
    cache_key = functools._make_key((), {}, typed=False)
    cache = {cache_key: (inflight_future, float("Inf"))}
    cached_coroutine = async_ttl_cache(cache=cache, ttl=0)(failing_coroutine)

    async def awaiter():
        awaiting_counter.value += 1
        # Only release the shared future once both awaiters are waiting.
        if awaiting_counter.value == 2:
            continue_event.set()
        try:
            await cached_coroutine()
        except TestException:
            pass

    # Must complete without raising KeyError from the second awaiter.
    await asyncio.gather(awaiter(), awaiter())
Ensure we handle the case where we encounter an exception in the cached future but another coroutine awaiting the same future ran first and already deleted the cache entry
test_async_ttl_cache_recover_if_cache_entry_removed
python
Yelp/paasta
tests/test_async_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_async_utils.py
Apache-2.0
async def test_async_ttl_cache_for_class_members_doesnt_leak_mem():
    """Per-instance caches must be dropped when their instance dies."""
    sentinel = 42
    instance_caches = defaultdict(dict)

    class TestClass:
        @async_ttl_cache(ttl=None, cleanup_self=True, cache=instance_caches)
        async def f(self):
            return sentinel

    first = TestClass()
    first_ref = weakref.ref(first)
    assert first_ref() is not None
    assert await first.f() == sentinel
    assert len(instance_caches) == 1
    # The cache is keyed by a weak reference to the instance.
    assert next(iter(instance_caches))() == first

    # Dropping the last strong reference must evict the cache entry too.
    del first
    assert len(instance_caches) == 0
    assert first_ref() is None

    a, b, c = TestClass(), TestClass(), TestClass()
    assert await a.f() == sentinel
    assert await b.f() == sentinel
    assert await c.f() == sentinel
    assert len(instance_caches) == 3
    del a, c
    assert len(instance_caches) == 1
    del b
    assert len(instance_caches) == 0
Ensure that we aren't leaking memory
test_async_ttl_cache_for_class_members_doesnt_leak_mem
python
Yelp/paasta
tests/test_async_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_async_utils.py
Apache-2.0
def test_brutal_bounce_no_existing_apps(self):
    """brutal_bounce must request app creation when marathon has no app yet."""
    assert bounce_lib.brutal_bounce(
        new_config={"id": "foo.bar.12345"},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=[],
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon is unaware of a service, brutal bounce should try to create a marathon app.
test_brutal_bounce_no_existing_apps
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_brutal_bounce_done(self):
    """No-op once the new app is fully up and no old copies remain."""
    new_tasks = [mock.Mock() for _ in range(5)]
    assert bounce_lib.brutal_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=[],
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon has the desired app, and there are no other copies of the service running, brutal bounce should neither start nor stop anything.
test_brutal_bounce_done
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_brutal_bounce_mid_bounce(self):
    """With the new app up, brutal bounce drains every remaining old task at once."""
    new_tasks = [mock.Mock() for _ in range(5)]
    old_tasks = [mock.Mock() for _ in range(10)]
    actual = bounce_lib.brutal_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=old_tasks,
    )
    assert actual == {"create_app": False, "tasks_to_drain": set(old_tasks)}
When marathon has the desired app, but there are other copies of the service running, brutal bounce should stop the old ones.
test_brutal_bounce_mid_bounce
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_brutal_bounce_old_but_no_new(self):
    """With only old copies running, brutal bounce creates the new app and drains all old tasks."""
    old_tasks = [mock.Mock() for _ in range(10)]
    actual = bounce_lib.brutal_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=old_tasks,
    )
    assert actual == {"create_app": True, "tasks_to_drain": set(old_tasks)}
When marathon does not have the desired app, but there are other copies of the service running, brutal bounce should stop the old ones and start the new one.
test_brutal_bounce_old_but_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_upthendown_bounce_no_existing_apps(self):
    """upthendown_bounce must request app creation when marathon has no app yet."""
    assert bounce_lib.upthendown_bounce(
        new_config={"id": "foo.bar.12345"},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=[],
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon is unaware of a service, upthendown bounce should try to create a marathon app.
test_upthendown_bounce_no_existing_apps
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_upthendown_bounce_old_but_no_new(self):
    """While only old copies exist, upthendown creates the new app but drains nothing yet."""
    old_tasks = [mock.Mock() for _ in range(10)]
    assert bounce_lib.upthendown_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=old_tasks,
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon has the desired app, but there are other copies of the service running, upthendown bounce should start the new one, but not stop the old one yet.
test_upthendown_bounce_old_but_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_upthendown_bounce_mid_bounce(self):
    """Until the new app is fully up, upthendown leaves every old task running."""
    new_tasks = [mock.Mock() for _ in range(3)]
    old_tasks = [mock.Mock() for _ in range(10)]
    assert bounce_lib.upthendown_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=old_tasks,
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon has the desired app, and there are other copies of the service running, but the new app is not fully up, upthendown bounce should not stop the old ones.
test_upthendown_bounce_mid_bounce
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_upthendown_bounce_cleanup(self):
    """Once the new app is fully up, upthendown drains every remaining old task."""
    new_tasks = [mock.Mock() for _ in range(5)]
    old_tasks = [mock.Mock() for _ in range(10)]
    assert bounce_lib.upthendown_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=old_tasks,
    ) == {"create_app": False, "tasks_to_drain": set(old_tasks)}
When marathon has the desired app, and there are other copies of the service running, and the new app is fully up, upthendown bounce should stop the old ones.
test_upthendown_bounce_cleanup
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_upthendown_bounce_done(self):
    """Nothing to create or drain once the new app is fully up and no old copies remain."""
    new_tasks = [mock.Mock() for _ in range(5)]
    assert bounce_lib.upthendown_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=[],
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon has the desired app, and there are no other copies of the service running, upthendown bounce should neither start nor stop anything.
test_upthendown_bounce_done
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_no_existing_apps(self):
    """crossover_bounce must request app creation when marathon has nothing for the service."""
    assert bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=[],
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon is unaware of a service, crossover bounce should try to create a marathon app.
test_crossover_bounce_no_existing_apps
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_old_but_no_new(self):
    """With only old copies running, crossover creates the new app but kills nothing yet."""
    old_tasks = [mock.Mock() for _ in range(5)]
    assert bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=old_tasks,
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon only has old apps for this service, crossover bounce should start the new one, but not kill any old tasks yet.
test_crossover_bounce_old_but_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_old_app_is_happy_but_no_new_app_happy_tasks(self):
    """With margin_factor < 1, crossover drains a margin's worth of old tasks up front."""
    old_tasks = [mock.Mock() for _ in range(100)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 100},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=old_tasks,
        margin_factor=0.95,
    )
    assert actual["create_app"] is True
    # 5% of 100 instances may be drained immediately.
    assert len(actual["tasks_to_drain"]) == 5
When marathon only has old apps for this service and margin_factor != 1, crossover bounce should start the new app and kill some old tasks.
test_crossover_bounce_old_app_is_happy_but_no_new_app_happy_tasks
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_some_unhappy_old_some_happy_old_no_new(self):
    """Unhappy old tasks are drained in preference to happy ones when no new tasks are up."""
    happy_old = [mock.Mock() for _ in range(5)]
    unhappy_old = [mock.Mock() for _ in range(5)]
    assert bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=[],
        old_non_draining_tasks=happy_old + unhappy_old,
    ) == {"create_app": False, "tasks_to_drain": set(unhappy_old)}
When marathon only has old apps for this service, and some of them are unhappy (maybe they've been recently started), the crossover bounce should start a new app and prefer killing the unhappy tasks over the happy ones.
test_crossover_bounce_some_unhappy_old_some_happy_old_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_some_unhappy_old_no_happy_old_no_new_tasks_no_excess(self):
    """No old tasks are drained when all are unhappy and there is no excess capacity."""
    unhappy_old = [mock.Mock() for _ in range(5)]
    assert bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=[],
        old_non_draining_tasks=unhappy_old,
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon only has old apps for this service, and all of their tasks are unhappy, and there are no excess tasks, the crossover bounce should start a new app and not kill any old tasks.
test_crossover_bounce_some_unhappy_old_no_happy_old_no_new_tasks_no_excess
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_lots_of_unhappy_old_no_happy_old_no_new(self):
    """Only the excess unhappy old tasks are drained when too many are running."""
    unhappy_old = [mock.Mock() for _ in range(10)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=[],
        old_non_draining_tasks=unhappy_old,
    )
    assert actual["create_app"] is False
    # 10 running vs 5 desired: exactly the 5 excess tasks go.
    assert len(actual["tasks_to_drain"]) == 5
When marathon has a new app and multiple old apps, no new tasks are up, all old tasks are unhappy, and there are too many tasks running, the crossover bounce should kill some (but not all) of the old tasks.
test_crossover_bounce_lots_of_unhappy_old_no_happy_old_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_lots_of_unhappy_old_some_happy_old_new_app_exists_no_new_tasks(
    self,
):
    """When one old app is healthy and another is not, only the unhealthy tasks get drained."""
    happy_old = [mock.Mock() for _ in range(5)]
    unhappy_old = [mock.Mock() for _ in range(5)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=[],
        old_non_draining_tasks=happy_old + unhappy_old,
    )
    assert actual["create_app"] is False
    assert actual["tasks_to_drain"] == set(unhappy_old)
    # With enough unhappy old tasks to drain, no happy ones should be touched.
    assert not actual["tasks_to_drain"] & set(happy_old)
When marathon has a new app and multiple old apps, no new tasks are up, one of the old apps is healthy and the other is not, only unhealthy tasks should get killed.
test_crossover_bounce_lots_of_unhappy_old_some_happy_old_new_app_exists_no_new_tasks
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_mid_bounce(self):
    """Mid-bounce, crossover drains only as many old tasks as there are happy new ones."""
    new_tasks = [mock.Mock() for _ in range(3)]
    happy_old = [mock.Mock() for _ in range(5)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=happy_old,
    )
    assert actual["create_app"] is False
    # 3 new tasks are happy, so 3 old tasks may go.
    assert len(actual["tasks_to_drain"]) == 3
When marathon has the desired app, and there are other copies of the service running, but the new app is not fully up, crossover bounce should only stop a few of the old instances.
test_crossover_bounce_mid_bounce
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_mid_bounce_some_happy_old_some_unhappy_old(self):
    """Unhappy old tasks are drained first; happy ones fill out the remaining excess."""
    new_tasks = [mock.Mock() for _ in range(3)]
    happy_old = [mock.Mock() for _ in range(5)]
    unhappy_old = [mock.Mock() for _ in range(1)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=happy_old + unhappy_old,
    )
    assert actual["create_app"] is False
    assert len(actual["tasks_to_drain"]) == 4
    # Fewer unhappy old tasks than excess: all of them go, plus some happy ones.
    assert set(unhappy_old) <= actual["tasks_to_drain"]
When marathon has the desired app, and there are other copies of the service running, and some of those older tasks are unhappy, we should prefer killing the unhappy tasks.
test_crossover_bounce_mid_bounce_some_happy_old_some_unhappy_old
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_mid_bounce_some_happy_old_lots_of_unhappy_old(self):
    """When unhappy old tasks cover the whole excess, only unhappy tasks are drained."""
    new_tasks = [mock.Mock() for _ in range(3)]
    happy_old = [mock.Mock() for _ in range(2)]
    unhappy_old = [mock.Mock() for _ in range(5)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=happy_old + unhappy_old,
    )
    assert actual["create_app"] is False
    # Excess equals the unhappy count, so everything drained is unhappy.
    assert len(actual["tasks_to_drain"]) == 5
    assert actual["tasks_to_drain"] == set(unhappy_old)
When marathon has the desired app, and there are other copies of the service running, and there are more unhappy old tasks than excess tasks, we should only kill unhappy tasks.
test_crossover_bounce_mid_bounce_some_happy_old_lots_of_unhappy_old
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_mid_bounce_no_happy_old_lots_of_unhappy_old(self):
    """Some, but not all, unhappy old tasks are drained when there is excess and no happy old."""
    new_tasks = [mock.Mock() for _ in range(3)]
    unhappy_old = [mock.Mock() for _ in range(6)]
    actual = bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=unhappy_old,
    )
    assert actual["create_app"] is False
    assert len(actual["tasks_to_drain"]) == 4
When marathon has the desired app, and there are other copies of the service running, but none of the old tasks are happy, and there are excess tasks, we should kill some (but not all) unhappy old tasks.
test_crossover_bounce_mid_bounce_no_happy_old_lots_of_unhappy_old
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_crossover_bounce_done(self):
    """Nothing is created or drained once the new app is fully up and nothing old remains."""
    new_tasks = [mock.Mock() for _ in range(5)]
    assert bounce_lib.crossover_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=[],
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon has the desired app, and there are no other copies of the service running, crossover bounce should neither start nor stop anything.
test_crossover_bounce_done
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_downthenup_bounce_no_existing_apps(self):
    """downthenup_bounce must request app creation when marathon knows nothing of the service."""
    assert bounce_lib.downthenup_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=[],
    ) == {"create_app": True, "tasks_to_drain": set()}
When marathon is unaware of a service, downthenup bounce should try to create a marathon app.
test_downthenup_bounce_no_existing_apps
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_downthenup_bounce_old_but_no_new(self):
    """downthenup drains every old task and holds off creating the new app."""
    old_tasks = [mock.Mock() for _ in range(6)]
    assert bounce_lib.downthenup_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=False,
        happy_new_tasks=[],
        old_non_draining_tasks=old_tasks,
    ) == {"create_app": False, "tasks_to_drain": set(old_tasks)}
When marathon has only old copies of the service, downthenup_bounce should kill them and not start a new one yet.
test_downthenup_bounce_old_but_no_new
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_downthenup_bounce_done(self):
    """No-op when the new app is fully up and no old copies are left."""
    new_tasks = [mock.Mock() for _ in range(5)]
    assert bounce_lib.downthenup_bounce(
        new_config={"id": "foo.bar.12345", "instances": 5},
        new_app_running=True,
        happy_new_tasks=new_tasks,
        old_non_draining_tasks=[],
    ) == {"create_app": False, "tasks_to_drain": set()}
When marathon has the desired app, and there are no other copies of the service running, downthenup bounce should neither start nor stop anything.
test_downthenup_bounce_done
python
Yelp/paasta
tests/test_bounce_lib.py
https://github.com/Yelp/paasta/blob/master/tests/test_bounce_lib.py
Apache-2.0
def test_service_group_chain_name(service_group):
    """Chain names must be stable, collision-resistant, and short enough for iptables."""
    name = service_group.chain_name
    assert name == "PAASTA.my_cool_se.f031797563"
    assert len(name) <= 28
The chain name must be stable, unique, and short.
test_service_group_chain_name
python
Yelp/paasta
tests/test_firewall.py
https://github.com/Yelp/paasta/blob/master/tests/test_firewall.py
Apache-2.0
def test_service_group_rules_empty_when_service_is_deleted(
    service_group, mock_service_config
):
    """A deleted service with containers still running must yield no rules, not raise."""
    with mock.patch.object(
        firewall, "get_instance_config", side_effect=NoConfigurationForServiceError()
    ):
        rules = service_group.get_rules(
            DEFAULT_SOA_DIR, firewall.DEFAULT_SYNAPSE_SERVICE_DIR
        )
    assert rules == ()
A deleted service which still has running containers shouldn't cause exceptions.
test_service_group_rules_empty_when_service_is_deleted
python
Yelp/paasta
tests/test_firewall.py
https://github.com/Yelp/paasta/blob/master/tests/test_firewall.py
Apache-2.0
def test_get_sidecar_resource_requirements_default_requirements(self): """When request is unspecified, it should default to the 0.1, 1024Mi, 256Mi.""" try: del self.deployment.config_dict["sidecar_resource_requirements"] except KeyError: pass system_paasta_config = mock.Mock( get_sidecar_requirements_config=mock.Mock( return_value={ "hacheck": { "cpu": 0.1, "memory": "512Mi", "ephemeral-storage": "256Mi", }, } ) ) assert self.deployment.get_sidecar_resource_requirements( "hacheck", system_paasta_config ) == V1ResourceRequirements( limits={"cpu": 0.1, "memory": "512Mi", "ephemeral-storage": "256Mi"}, requests={"cpu": 0.1, "memory": "512Mi", "ephemeral-storage": "256Mi"}, )
When request is unspecified, it should default to the 0.1, 1024Mi, 256Mi.
test_get_sidecar_resource_requirements_default_requirements
python
Yelp/paasta
tests/test_kubernetes_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_kubernetes_tools.py
Apache-2.0
def test_get_node_affinity_no_reqs_with_global_override(self): """ Given global node affinity overrides and no deployment specific requirements, the globals should be used """ assert self.deployment.get_node_affinity( {"default": {"topology.kubernetes.io/zone": ["us-west-1a", "us-west-1b"]}}, ) == V1NodeAffinity( required_during_scheduling_ignored_during_execution=V1NodeSelector( node_selector_terms=[ V1NodeSelectorTerm( match_expressions=[ V1NodeSelectorRequirement( key="topology.kubernetes.io/zone", operator="In", values=["us-west-1a", "us-west-1b"], ) ] ) ], ), )
Given global node affinity overrides and no deployment specific requirements, the globals should be used
test_get_node_affinity_no_reqs_with_global_override
python
Yelp/paasta
tests/test_kubernetes_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_kubernetes_tools.py
Apache-2.0
def test_get_node_affinity_no_reqs_with_global_override_and_deployment_config(self): """ Given global node affinity overrides and deployment specific requirements, globals should be ignored """ deployment = KubernetesDeploymentConfig( service="kurupt", instance="fm", cluster="brentford", config_dict={ "node_selectors": {"topology.kubernetes.io/zone": ["us-west-1a"]}, "node_selectors_preferred": [ { "weight": 1, "preferences": { "instance_type": ["a1.1xlarge"], }, } ], }, branch_dict=None, soa_dir="/nail/blah", ) actual = deployment.get_node_affinity( {"default": {"topology.kubernetes.io/zone": ["us-west-1a", "us-west-1b"]}}, ) expected = V1NodeAffinity( required_during_scheduling_ignored_during_execution=V1NodeSelector( node_selector_terms=[ V1NodeSelectorTerm( match_expressions=[ V1NodeSelectorRequirement( key="topology.kubernetes.io/zone", operator="In", values=["us-west-1a"], ), ] ) ], ), preferred_during_scheduling_ignored_during_execution=[ V1PreferredSchedulingTerm( weight=1, preference=V1NodeSelectorTerm( match_expressions=[ V1NodeSelectorRequirement( key="node.kubernetes.io/instance-type", operator="In", values=["a1.1xlarge"], ), ] ), ) ], ) assert actual == expected
Given global node affinity overrides and deployment specific requirements, globals should be ignored
test_get_node_affinity_no_reqs_with_global_override_and_deployment_config
python
Yelp/paasta
tests/test_kubernetes_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_kubernetes_tools.py
Apache-2.0
def test_get_node_affinity_no_reqs_with_global_override_and_deployment_config_habitat( self, ): """ Given global node affinity overrides and deployment specific zone selector, globals should be ignored """ deployment = KubernetesDeploymentConfig( service="kurupt", instance="fm", cluster="brentford", config_dict={"node_selectors": {"yelp.com/habitat": ["uswest1astagef"]}}, branch_dict=None, soa_dir="/nail/blah", ) actual = deployment.get_node_affinity( {"default": {"topology.kubernetes.io/zone": ["us-west-1a", "us-west-1b"]}}, ) expected = V1NodeAffinity( required_during_scheduling_ignored_during_execution=V1NodeSelector( node_selector_terms=[ V1NodeSelectorTerm( match_expressions=[ V1NodeSelectorRequirement( key="yelp.com/habitat", operator="In", values=["uswest1astagef"], ), ] ) ], ) ) assert actual == expected
Given global node affinity overrides and deployment specific zone selector, globals should be ignored
test_get_node_affinity_no_reqs_with_global_override_and_deployment_config_habitat
python
Yelp/paasta
tests/test_kubernetes_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_kubernetes_tools.py
Apache-2.0
def gen_mesos_cli_fobj(file_path, file_lines): """mesos.cli.cluster.files (0.1.5), returns a list of mesos.cli.mesos_file.File `File` is an iterator-like object. """ async def _readlines_reverse(): for line in reversed(file_lines): yield line fobj = mock.create_autospec(mesos.mesos_file.File) fobj.path = file_path fobj._readlines_reverse = _readlines_reverse return fobj
mesos.cli.cluster.files (0.1.5), returns a list of mesos.cli.mesos_file.File `File` is an iterator-like object.
gen_mesos_cli_fobj
python
Yelp/paasta
tests/test_mesos_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_mesos_tools.py
Apache-2.0
def kubernetes_cluster_config(): """Return a sample dict to mock paasta_tools.utils.load_service_instance_configs""" return { "main": { "instances": 3, "deploy_group": "{cluster}.non_canary", "cpus": 0.1, "mem": 1000, }, "canary": { "instances": 1, "deploy_group": "{cluster}.canary", "cpus": 0.1, "mem": 1000, }, "not_deployed": { "instances": 1, "deploy_group": "not_deployed", "cpus": 0.1, "mem": 1000, }, }
Return a sample dict to mock paasta_tools.utils.load_service_instance_configs
kubernetes_cluster_config
python
Yelp/paasta
tests/test_paasta_service_config_loader.py
https://github.com/Yelp/paasta/blob/master/tests/test_paasta_service_config_loader.py
Apache-2.0
def test_get_action_config( self, mock_load_deployments, action_service, action_deploy, cluster, expected_cluster, ): """Check resulting action config with various overrides from the action.""" action_dict = {"command": "echo first"} if action_service: action_dict["service"] = action_service if action_deploy: action_dict["deploy_group"] = action_deploy job_service = "my_service" job_deploy = "prod" expected_service = action_service or job_service expected_deploy = action_deploy or job_deploy job_dict = { "node": "batch_server", "schedule": "daily 12:10:00", "service": job_service, "deploy_group": job_deploy, "max_runtime": "2h", "actions": {"normal": action_dict}, "monitoring": {"team": "noop"}, } soa_dir = "/other_dir" job_config = tron_tools.TronJobConfig( "my_job", job_dict, cluster, soa_dir=soa_dir ) with mock.patch( "paasta_tools.tron_tools.load_system_paasta_config", autospec=True, return_value=MOCK_SYSTEM_PAASTA_CONFIG_OVERRIDES, ): action_config = job_config._get_action_config( "normal", action_dict=action_dict ) mock_load_deployments.assert_called_once_with(expected_service, soa_dir) mock_deployments_json = mock_load_deployments.return_value mock_deployments_json.get_docker_image_for_deploy_group.assert_called_once_with( expected_deploy ) mock_deployments_json.get_git_sha_for_deploy_group.assert_called_once_with( expected_deploy ) mock_deployments_json.get_image_version_for_deploy_group.assert_called_once_with( expected_deploy ) expected_branch_dict = { "docker_image": mock_deployments_json.get_docker_image_for_deploy_group.return_value, "git_sha": mock_deployments_json.get_git_sha_for_deploy_group.return_value, "image_version": mock_deployments_json.get_image_version_for_deploy_group.return_value, "desired_state": "start", "force_bounce": None, } expected_input_action_config = { "command": "echo first", "service": expected_service, "deploy_group": expected_deploy, "monitoring": {"team": "noop"}, } assert action_config == tron_tools.TronActionConfig( 
service=expected_service, instance=tron_tools.compose_instance("my_job", "normal"), config_dict=expected_input_action_config, branch_dict=expected_branch_dict, soa_dir=soa_dir, cluster=expected_cluster, )
Check resulting action config with various overrides from the action.
test_get_action_config
python
Yelp/paasta
tests/test_tron_tools.py
https://github.com/Yelp/paasta/blob/master/tests/test_tron_tools.py
Apache-2.0
def test_format_path(self): """Test the path formatting for FileLogWriter""" fw = utils.FileLogWriter( "/logs/{service}/{component}/{level}/{cluster}/{instance}" ) expected = "/logs/a/b/c/d/e" assert expected == fw.format_path("a", "b", "c", "d", "e")
Test the path formatting for FileLogWriter
test_format_path
python
Yelp/paasta
tests/test_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_utils.py
Apache-2.0
def test_maybe_flock(self): """Make sure we flock and unflock when flock=True""" with mock.patch("paasta_tools.utils.fcntl", autospec=True) as mock_fcntl: fw = utils.FileLogWriter("/dev/null", flock=True) mock_file = mock.Mock() with fw.maybe_flock(mock_file): mock_fcntl.flock.assert_called_once_with( mock_file.fileno(), mock_fcntl.LOCK_EX ) mock_fcntl.flock.reset_mock() mock_fcntl.flock.assert_called_once_with( mock_file.fileno(), mock_fcntl.LOCK_UN )
Make sure we flock and unflock when flock=True
test_maybe_flock
python
Yelp/paasta
tests/test_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_utils.py
Apache-2.0
def test_maybe_flock_flock_false(self): """Make sure we don't flock/unflock when flock=False""" with mock.patch("paasta_tools.utils.fcntl", autospec=True) as mock_fcntl: fw = utils.FileLogWriter("/dev/null", flock=False) mock_file = mock.Mock() with fw.maybe_flock(mock_file): assert mock_fcntl.flock.call_count == 0 assert mock_fcntl.flock.call_count == 0
Make sure we don't flock/unflock when flock=False
test_maybe_flock_flock_false
python
Yelp/paasta
tests/test_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_utils.py
Apache-2.0
def test_log_makes_exactly_one_write_call(self): """We want to make sure that log() makes exactly one call to write, since that's how we ensure atomicity.""" fake_file = mock.Mock() fake_contextmgr = mock.Mock( __enter__=lambda _self: fake_file, __exit__=lambda _self, t, v, tb: None ) fake_line = "text" * 1000000 with mock.patch( "paasta_tools.utils.io.FileIO", return_value=fake_contextmgr, autospec=True ) as mock_FileIO: fw = utils.FileLogWriter("/dev/null", flock=False) with mock.patch( "paasta_tools.utils.format_log_line", return_value=fake_line, autospec=True, ) as fake_fll: fw.log( "service", "line", "component", level="level", cluster="cluster", instance="instance", ) fake_fll.assert_called_once_with( "level", "cluster", "service", "instance", "component", "line" ) mock_FileIO.assert_called_once_with("/dev/null", mode=fw.mode, closefd=True) fake_file.write.assert_called_once_with(f"{fake_line}\n".encode("UTF-8"))
We want to make sure that log() makes exactly one call to write, since that's how we ensure atomicity.
test_log_makes_exactly_one_write_call
python
Yelp/paasta
tests/test_utils.py
https://github.com/Yelp/paasta/blob/master/tests/test_utils.py
Apache-2.0
def create_mock_instance_config(instance_type, namespace): """ Creates a mock InstanceConfig with specified instance_type and namespace. :param instance_type: The type of the instance (e.g., "kubernetes", "paasta-native"). :param namespace: The namespace associated with the instance. :return: A mock InstanceConfig object. """ mock_instance_config = MagicMock() mock_instance_config.get_instance_type.return_value = instance_type mock_instance_config.get_namespace.return_value = namespace return mock_instance_config
Creates a mock InstanceConfig with specified instance_type and namespace. :param instance_type: The type of the instance (e.g., "kubernetes", "paasta-native"). :param namespace: The namespace associated with the instance. :return: A mock InstanceConfig object.
create_mock_instance_config
python
Yelp/paasta
tests/cli/test_cmds_list_namespaces.py
https://github.com/Yelp/paasta/blob/master/tests/cli/test_cmds_list_namespaces.py
Apache-2.0
def reraise_keyboardinterrupt(): """If it's not caught, this kills pytest :'(""" try: yield except FakeKeyboardInterrupt: # pragma: no cover (error case only) raise AssertionError("library failed to catch KeyboardInterrupt")
If it's not caught, this kills pytest :'(
reraise_keyboardinterrupt
python
Yelp/paasta
tests/cli/test_cmds_logs.py
https://github.com/Yelp/paasta/blob/master/tests/cli/test_cmds_logs.py
Apache-2.0
def test_jira_ticket_parameter( mock_get_smart_paasta_instance_name, mock_configure_and_run_docker_container, mock_spark_conf_builder, mock_parse_user_spark_args, mock_get_spark_app_name, mock_get_docker_image, mock_get_aws_credentials, mock_get_instance_config, mock_load_system_paasta_config_spark_run, mock_load_system_paasta_config_utils, mock_validate_work_dir, jira_ticket, expected_in_call, ): """Test that the jira_ticket parameter is correctly passed to SparkConfBuilder.""" args = argparse.Namespace( work_dir="/tmp/local", cmd="pyspark", build=True, image=None, enable_compact_bin_packing=False, disable_compact_bin_packing=False, service="test-service", instance="test-instance", cluster="test-cluster", pool="test-pool", yelpsoa_config_root="/path/to/soa", aws_credentials_yaml="/path/to/creds", aws_profile=None, spark_args="spark.cores.max=100 spark.executor.cores=10", cluster_manager=spark_run.CLUSTER_MANAGER_K8S, timeout_job_runtime="1m", enable_dra=False, aws_region="test-region", force_spark_resource_configs=False, assume_aws_role=None, aws_role_duration=3600, k8s_server_address=None, tronfig=None, job_id=None, use_web_identity=False, uses_bulkdata=True, get_eks_token_via_iam_user=False, force_pod_identity=False, executor_pod_identity=False, jira_ticket=jira_ticket, ) mock_load_system_paasta_config_utils.return_value.get_kube_clusters.return_value = ( {} ) mock_load_system_paasta_config_spark_run.return_value.get_cluster_aliases.return_value = ( {} ) mock_load_system_paasta_config_spark_run.return_value.get_pools_for_cluster.return_value = [ "test-pool" ] mock_load_system_paasta_config_spark_run.return_value.get_eks_cluster_aliases.return_value = { "test-cluster": "test-cluster" } mock_get_docker_image.return_value = DUMMY_DOCKER_IMAGE_DIGEST mock_spark_conf_builder.return_value.get_spark_conf.return_value = { "spark.kubernetes.executor.podTemplateFile": "/test/pod-template.yaml", } mock_get_instance_config.return_value.get_iam_role.return_value = None 
spark_run.paasta_spark_run(args) # Verify that jira_ticket is passed correctly to SparkConfBuilder.get_spark_conf mock_spark_conf_builder.return_value.get_spark_conf.assert_called_once_with( cluster_manager=spark_run.CLUSTER_MANAGER_K8S, spark_app_base_name=mock_get_spark_app_name.return_value, docker_img=DUMMY_DOCKER_IMAGE_DIGEST, user_spark_opts=mock_parse_user_spark_args.return_value, paasta_cluster="test-cluster", paasta_pool="test-pool", paasta_service="test-service", paasta_instance=mock_get_smart_paasta_instance_name.return_value, extra_volumes=mock_get_instance_config.return_value.get_volumes.return_value, aws_creds=mock_get_aws_credentials.return_value, aws_region="test-region", force_spark_resource_configs=False, use_eks=True, k8s_server_address=None, service_account_name=None, jira_ticket=jira_ticket, )
Test that the jira_ticket parameter is correctly passed to SparkConfBuilder.
test_jira_ticket_parameter
python
Yelp/paasta
tests/cli/test_cmds_spark_run.py
https://github.com/Yelp/paasta/blob/master/tests/cli/test_cmds_spark_run.py
Apache-2.0
def _formatted_table_to_dict(formatted_table): """Convert a single-row table with header to a dictionary""" headers = [ header.strip() for header in formatted_table[0].split(" ") if len(header) > 0 ] fields = [ field.strip() for field in formatted_table[1].split(" ") if len(field) > 0 ] return dict(zip(headers, fields))
Convert a single-row table with header to a dictionary
_formatted_table_to_dict
python
Yelp/paasta
tests/cli/test_cmds_status.py
https://github.com/Yelp/paasta/blob/master/tests/cli/test_cmds_status.py
Apache-2.0
def test_suggest_smartstack_proxy_port_too_many_services( self, mock_read_etc_services ): """If all the ports are taken, we should raise an error""" yelpsoa_config_root = "fake_yelpsoa_config_root" walk_return = [ ("fake_root1", "fake_dir1", ["smartstack.yaml"]), ("fake_root2", "fake_dir2", ["smartstack.yaml"]), ("fake_root3", "fake_dir3", ["smartstack.yaml"]), ] mock_walk = mock.Mock(return_value=walk_return) # See http://www.voidspace.org.uk/python/mock/examples.html#multiple-calls-with-different-effects get_smartstack_proxy_ports_from_file_returns = [ {20001, 20003}, {20002}, {55555}, # bogus out-of-range value ] def get_smarstack_proxy_ports_from_file_side_effect(*args): return get_smartstack_proxy_ports_from_file_returns.pop(0) mock_get_smartstack_proxy_ports_from_file = mock.Mock( side_effect=get_smarstack_proxy_ports_from_file_side_effect ) with mock.patch("os.walk", mock_walk, autospec=None): with mock.patch( "paasta_tools.cli.fsm.autosuggest._get_smartstack_proxy_ports_from_file", mock_get_smartstack_proxy_ports_from_file, autospec=None, ): with raises(Exception) as exc: autosuggest.suggest_smartstack_proxy_port( yelpsoa_config_root, range_min=20001, range_max=20003 ) assert ( "There are no more ports available in the range [20001, 20003]" == str(exc.value) )
If all the ports are taken, we should raise an error
test_suggest_smartstack_proxy_port_too_many_services
python
Yelp/paasta
tests/cli/fsm/test_autosuggest.py
https://github.com/Yelp/paasta/blob/master/tests/cli/fsm/test_autosuggest.py
Apache-2.0
def process_queue(self, timeout=1): """ Called only by the internal thread. Takes updates from the input queue and returns them. If updates clash on (key, prop_name), only the first is returned, and the rest are saved in `self.ui_updates` to be processed on subsequent runs. """ task_mutations = [] stop = False def process(elem): nonlocal stop if elem[0] == "task_mutations": task_mutations.extend(elem[1]) elif elem[0] in "ui_updates": self.ui_updates.extend(elem[1]) elif elem[0] == "stop": stop = True else: raise Exception(f"Malformed update: {elem}") try: # Block on an empty queue only if we don't have any # previous updates saved in `self.ui_updates`. if not self.ui_updates: process(self.input_queue.get(timeout=timeout)) while True: try: process(self.input_queue.get_nowait()) except Empty: break except Empty: pass if not self.ui_updates: return stop, [], task_mutations # We apply updates in batches disjoint on (key, prop_name). We # assume it's ok to apply multiple updates in the same frame # as long as we're not updating the same prop multiple times. check_set = set() ui_updates = [] for (key, prop_name, value) in list(self.ui_updates): if (key, prop_name) not in check_set: check_set.add((key, prop_name)) ui_updates.append((key, prop_name, value)) self.ui_updates.remove((key, prop_name, value)) return stop, ui_updates, task_mutations
Called only by the internal thread. Takes updates from the input queue and returns them. If updates clash on (key, prop_name), only the first is returned, and the rest are saved in `self.ui_updates` to be processed on subsequent runs.
process_queue
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def run_user_app(self, frame): """ Called only by the internal thread. Runs the user app function in the context of `frame` and returns the resulting root container. """ if frame.prev_frame_mutations: self.cache.eject_entries_for_mutated_props(frame.prev_frame_mutations) root_container = vbox(collect=False) with root_container: with timing("App", profile=PROFILE_RUN): self.app_function() logger.debug(f"Component count: {AppRunnerFrame.current().component_count}") return root_container
Called only by the internal thread. Runs the user app function in the context of `frame` and returns the resulting root container.
run_user_app
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def apply_ui_updates(self, ui_updates): """ Called only by the internal thread. Applies the given `ui_updates` to the application state, and returns the mutations caused by these updates, separating normal mutations from event mutations. """ with UIUpdatesFrame(self) as ui_update_frame: logger.debug(f"UI Updates: {ui_updates}") for key, prop_name, value in ui_updates: if value == "$reset": ui_update_frame.reset_state(key, prop_name) else: ui_update_frame.update_state(key, prop_name, value) return ui_update_frame.mutations, ui_update_frame.event_mutations
Called only by the internal thread. Applies the given `ui_updates` to the application state, and returns the mutations caused by these updates, separating normal mutations from event mutations.
apply_ui_updates
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def render_and_reply(self, frame, root_container=None, diff=None): """ Called only by the internal thread. Given a root container or a diff, render that root container or diff and send a reply on the websocket. The reply will contain any pending commands and changed singletons. Even if there is no relevant container or diff to send, but there are commands or singletons, a reply will be sent. """ # frame.set_phase(FramePhase.Rendering) output = dict() # Render the container or diff with timing("Set UI Prop Values"): if root_container: self.ui_prop_state.set_prop_values_from_component(root_container) elif diff: self.ui_prop_state.set_prop_values_from_diff(diff) with timing("Render", profile=PROFILE_RENDER): if root_container: output["dom"] = root_container.render() elif diff: output["diff"] = diff.render() # Render changed singletons singletons = SingletonCollector.create_ui_singletons() for singleton in singletons: if self.ui_prop_state.component_changed(singleton): self.ui_prop_state.set_prop_values_from_component(singleton) output.setdefault("singletons", dict()) output["singletons"][singleton._name] = singleton.render() # Render commands if len(self.pending_commands) > 0: output["commands"] = [command.render() for command in self.pending_commands] self.pending_commands.clear() # If anything changed, send it to the UI if len(output) > 0: if PRINT_OUTPUT: logger.debug(json.dumps(output, indent=2)) self.connection.send(output)
Called only by the internal thread. Given a root container or a diff, render that root container or diff and send a reply on the websocket. The reply will contain any pending commands and changed singletons. Even if there is no relevant container or diff to send, but there are commands or singletons, a reply will be sent.
render_and_reply
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def diff_and_reply(self, frame, root_container): """ Called only by the internal thread. Sends a reply with the given root container (or a diff if a previous container exists to diff against). """ dom = None dom_diff = None if self.previous_root_container: with timing("Diff", profile=PROFILE_DIFF): dom_diff = diff(self.previous_root_container, root_container) else: dom = root_container self.previous_root_container = root_container self.render_and_reply(frame, root_container=dom, diff=dom_diff)
Called only by the internal thread. Sends a reply with the given root container (or a diff if a previous container exists to diff against).
diff_and_reply
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def run(self, mutations, event_mutations=None): """ Called only by the internal thread. Runs the user app if necessary, and sends a reply to the browser if necessary. `mutations` is a set of mutations from a prior frame. Using these `mutations`, we determine if the user function is "dirty" and needs to run again. If `event_mutations` are given, we reset those event props to default values after running the user function. """ root_container = None # We run the user app in the context of the given mutations. with AppRunnerFrame(self, prev_frame_mutations=mutations) as frame: run_function = self.app_function.is_dirty() if run_function: logger.debug(f"Dirty deps: {self.app_function.get_dirty_deps()}") root_container = self.run_user_app(frame) if event_mutations: with ResetUIEventsFrame(self) as reset_frame: self.reset_event_mutations(reset_frame, event_mutations) if not run_function: with RenderFrame(self) as render_frame: self.diff_mutations_and_reply(render_frame, mutations) return 0 # We keep running the user app until there are no more # dirty mutations, or hit the run limit. num_frames = 1 while True: with AppRunnerFrame(self, prev_frame_mutations=frame.mutations) as frame: run_function = self.app_function.is_dirty() if run_function: logger.debug(f"Dirty deps: {self.app_function.get_dirty_deps()}") root_container = self.run_user_app(frame) if not run_function: with RenderFrame(self) as render_frame: self.diff_and_reply(render_frame, root_container) break num_frames += 1 if num_frames >= 20: raise RuntimeError( "Possible infinite loop detected. Stopped after 20 runs." ) return num_frames
Called only by the internal thread. Runs the user app if necessary, and sends a reply to the browser if necessary. `mutations` is a set of mutations from a prior frame. Using these `mutations`, we determine if the user function is "dirty" and needs to run again. If `event_mutations` are given, we reset those event props to default values after running the user function.
run
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def run_loop(self): """ Called only by the internal thread. The long-running thread that runs the application function in response to state updates. This thread is alive if and only if the corresponding websocket is connected. When the websocket closes, it calls `AppRunner.stop()`, causing the run loop to exit. """ # 1st frame with StateAccessFrame(self): # Add these 'singletons' to the state, because the # UI will update them immediately, and their props # need to be added to state in order to be # updated. SingletonCollector.create_singletons() # This loop runs indefinitely, until stop() is called, or it # exits due to an uncaught exception in user code. while True: # Grab updates from the queue. stop, ui_updates, task_mutations = self.process_queue(timeout=1) if ui_updates or task_mutations: self.run_id += 1 logger.debug( colored( f"######## Run {self.run_id} ########", "magenta", attrs=["bold"], ) ) with timing(f"Run {self.run_id}"): # Run the app in the context of ui updates if ui_updates: # First apply the UI updates ( ui_mutations, ui_event_mutations, ) = self.apply_ui_updates(ui_updates) # Then run the app in the context of those # mutations num_frames = self.run( ui_mutations, event_mutations=ui_event_mutations ) logger.debug(f"{num_frames} frames ran the app.") # Run the app in the context of task mutations if task_mutations: self.run(task_mutations) # Exit the thread if stop: break
Called only by the internal thread. The long-running thread that runs the application function in response to state updates. This thread is alive if and only if the corresponding websocket is connected. When the websocket closes, it calls `AppRunner.stop()`, causing the run loop to exit.
run_loop
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def run_loop_wrapper(self): """ The entrypoint to the main loop thread. Whereas `run_loop` raises unhandled exceptions, `run_loop_wrapper` catches those exceptions, prints a stacktrace, and gracefully exits the thread. """ try: self.run_loop() except Stop: pass except Exception as e: message = ( "INTERNAL ERROR!\n" + dedent("".join(traceback.format_tb(e.__traceback__))) + f"{e.__class__.__name__}: {e}" ) print(colored(message, "red"))
The entrypoint to the main loop thread. Whereas `run_loop` raises unhandled exceptions, `run_loop_wrapper` catches those exceptions, prints a stacktrace, and gracefully exits the thread.
run_loop_wrapper
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def _internal_sync(self): """ Only used in tests. Blocks until the app runner becomes idle -- input queue is empty and the task runner is idle. This method is only used in tests and assumes that while this method is running, no other user threads are adding items to the input queue. While flushing, the app runner may internally continue adding items to the input queue, like scheduling tasks or simulating ui events. """ while True: if self.input_queue.qsize() == 0 and self.task_runtime.is_empty(): return time.sleep(0.01)
Only used in tests. Blocks until the app runner becomes idle -- input queue is empty and the task runner is idle. This method is only used in tests and assumes that while this method is running, no other user threads are adding items to the input queue. While flushing, the app runner may internally continue adding items to the input queue, like scheduling tasks or simulating ui events.
_internal_sync
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def enqueue_ui_updates(self, ui_updates): """ Enqueue a batch of "ui updates". These are typically events generated by users in the browser and enqueued by `hyperdiv.connection.Connection`. They can also be simulated UI events, triggered by user code via `self.trigger_event`. """ self.input_queue.put( ( "ui_updates", [ ( key, prop_name, tuple(value) if isinstance(value, list) else value, ) for key, prop_name, value in ui_updates ], ) )
Enqueue a batch of "ui updates". These are typically events generated by users in the browser and enqueued by `hyperdiv.connection.Connection`. They can also be simulated UI events, triggered by user code via `self.trigger_event`.
enqueue_ui_updates
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def trigger_event(self, prop, value): """ Simulates a UI event. Usually called by `Component.trigger_event """ if not prop.is_event_prop: raise Exception(f"Cannot trigger event for non event prop {prop.name}") self.enqueue_ui_updates([(prop.key, prop.name, value)])
Simulates a UI event. Usually called by `Component.trigger_event
trigger_event
python
hyperdiv/hyperdiv
hyperdiv/app_runner.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/app_runner.py
Apache-2.0
def cached(fn):
    """
    To help improve performance when building large, modular apps,
    Hyperdiv functions that generate UI components can be wrapped in
    `@cached` to avoid re-running those function calls if their read
    dependencies have not changed.

    For example:

    ```py
    @hd.cached
    def my_counter(label):
        state = hd.state(count=0)
        with hd.box(
            gap=1,
            padding=1,
            border="1px solid neutral-100",
            border_radius=1,
        ):
            hd.markdown(f"### {label}")
            hd.text(state.count)
            if hd.button("Increment").clicked:
                state.count += 1

    my_counter("Counter")
    my_counter("Counter")
    ```

    In this example, if we click the `Increment` button in the first
    counter, that first call to `my_counter("Counter")` will re-run,
    because its read dependency on `button.clicked` is
    invalidated. But the second call will *not* re-run, since its read
    dependencies have not changed.

    Similarly, if we click the button in the second counter, the first
    call will not re-run. Instead, the cached UI generated by the
    previous call to the function is reused.
    """

    @wraps(fn)
    def wrapper(*args, **kwargs):
        # The cache key combines the call's location in the component
        # tree, the function's qualified name, and the call arguments.
        location_key = get_component_key()
        qualname = f"{fn.__module__}.{fn.__name__}"
        key = (qualname,) + hashkey(location_key, *args, **kwargs)
        return cached_wrapper(key, fn, *args, **kwargs)

    return wrapper
To help improve performance when building large, modular apps, Hyperdiv functions that generate UI components can be wrapped in `@cached` to avoid re-running those function calls if their read dependencies have not changed. For example: ```py @hd.cached def my_counter(label): state = hd.state(count=0) with hd.box( gap=1, padding=1, border="1px solid neutral-100", border_radius=1, ): hd.markdown(f"### {label}") hd.text(state.count) if hd.button("Increment").clicked: state.count += 1 my_counter("Counter") my_counter("Counter") ``` In this example. If we click the `Increment` button in the first counter, that first call to `my_function("Counter")` will re-run, because its read dependency on `button.clicked` is invalidated. But the second call will *not* re-run, since its read dependencies have not changed. Similarly, if we click the button in the second counter, the first call to `my_function("Counter")` will not rerun. Instead, the cached UI generated by the previous call to the function will be reused.
cached
python
hyperdiv/hyperdiv
hyperdiv/cache.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/cache.py
Apache-2.0
def cached_app(app_fn):
    """
    Works like @cached but only for the top-level app function.

    Unlike @cached, it does not use a component key to generate the
    cache key, enabling the addition of helper functions `is_dirty`,
    `deps`, and `get_dirty_deps` which help AppRuntime decide when to
    re-run the user app, as well as print useful debugging info.
    """
    # The app function has a single logical call site, so a static key
    # derived from its qualified name is enough.
    cache_key = f"{app_fn.__module__}.{app_fn.__name__}"

    @wraps(app_fn)
    def wrapper():
        return cached_wrapper(cache_key, app_fn)

    def get_deps(*args, **kwargs):
        # Returns the cached read-dependencies, or None when nothing
        # has been cached yet.
        cached_value = AppRunnerFrame.current().cache_get(cache_key)
        if cached_value == Cache.NotFound:
            return None
        return cached_value["deps"]

    def is_dirty(*args, **kwargs):
        frame = AppRunnerFrame.current()
        deps = get_deps(*args, **kwargs)
        if deps is None:
            return True
        return frame.deps_are_dirty(deps)

    def get_dirty_deps(*args, **kwargs):
        frame = AppRunnerFrame.current()
        deps = get_deps(*args, **kwargs)
        if deps is None:
            return None
        return frame.filter_dirty_deps(deps)

    wrapper.is_dirty = is_dirty
    # Useful for debugging:
    wrapper.deps = get_deps
    wrapper.get_dirty_deps = get_dirty_deps

    return wrapper
Works like @cached but only for the top-level app function. Unlike @cached, it does not use a component key to generate the cache key, enabling the addition of helper functions `is_dirty`, `deps`, and `get_dirty_deps` which help AppRuntime decide when to re-run the user app, as well as print useful debugging info.
cached_app
python
hyperdiv/hyperdiv
hyperdiv/cache.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/cache.py
Apache-2.0
def current(self):
    """
    `current()` is a semi-public interface and can be used by
    components to introspect the parent component in which they're
    being collected.

    It skips ShadowCollectors, which are internal collectors used by
    the cache.
    """
    # Walk down from the top of the stack (negative indexing) past any
    # ShadowCollectors.
    index = -1
    top = self.stack[index]
    while isinstance(top, ShadowCollector):
        index -= 1
        top = self.stack[index]
    return top
`current()` is a semi-public interface and can be used by components to introspect the parent component in which they're being collected. It skips ShadowCollectors which are internal collectors used by the cache.
current
python
hyperdiv/hyperdiv
hyperdiv/collector.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/collector.py
Apache-2.0
def collect(self):
    """
    Called when the component is collected into the dom.

    If the component was constructed with `collect=False`, this method
    has to be called by the user code. Otherwise it is called
    automatically when the component is constructed.

    Raises `ValueError` if the component was already collected.
    """
    if self._collected:
        raise ValueError("The component has already been collected.")
    frame = AppRunnerFrame.current()
    frame.collector_stack.collect(self)
    self._collected = True
Called when the component is collected into the dom. If the component was constructed with `collect=False`, this method has to be called by the user code. Otherwise it is called automatically when the component is constructed.
collect
python
hyperdiv/hyperdiv
hyperdiv/component_base.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/component_base.py
Apache-2.0
def children(self):
    """
    Returns the list of children of this component, if this component
    can have children. If the component cannot have children,
    accessing this property raises `ValueError`.
    """
    # Guard clause: reject access on components that cannot contain
    # children.
    if not self._has_children:
        raise ValueError(f"'{self._name}' cannot have children.")
    return self._children
Returns the list of children of this component, if this component can have children. If the component cannot have children, accessing this property raises `ValueError`.
children
python
hyperdiv/hyperdiv
hyperdiv/component_base.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/component_base.py
Apache-2.0
def set_prop_delayed(self, prop_name, prop_value, delay=1):
    """
    Sets the prop with `prop_name` to the value `prop_value` after a
    delay of `delay` seconds.

    This may be useful for auto-closing an ephemeral alert, dropdown,
    or dialog, after being shown for some duration of time.
    """
    # Local import; likely avoids a circular module dependency.
    from .components.task import run_asynchronously

    async def apply_after_delay():
        await asyncio.sleep(delay)
        setattr(self, prop_name, prop_value)

    def done_callback(result=None, error=None):
        # The outcome of the delayed mutation is intentionally ignored.
        pass

    run_asynchronously(done_callback, apply_after_delay)
Sets the prop with `prop_name` to the value `prop_value` after a delay of `delay` seconds. This may be useful for auto-closing an ephemeral alert, dropdown, or dialog, after being shown for some duration of time.
set_prop_delayed
python
hyperdiv/hyperdiv
hyperdiv/component_base.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/component_base.py
Apache-2.0
def reset_prop_delayed(self, prop_name, delay=1):
    """
    Like `set_prop_delayed` but instead of mutating the prop it resets
    it to its initial value.
    """
    # Local import; likely avoids a circular module dependency.
    from .components.task import run_asynchronously

    async def reset_after_delay():
        await asyncio.sleep(delay)
        self.reset_prop(prop_name)

    def done_callback(result=None, error=None):
        # The outcome of the delayed reset is intentionally ignored.
        pass

    run_asynchronously(done_callback, reset_after_delay)
Like `set_prop_delayed` but instead of mutating the prop it resets it to its initial value.
reset_prop_delayed
python
hyperdiv/hyperdiv
hyperdiv/component_base.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/component_base.py
Apache-2.0
def reset_component_delayed(self, delay=1):
    """
    Like `reset_prop_delayed` but resets all component props.
    """
    # Local import; likely avoids a circular module dependency.
    from .components.task import run_asynchronously

    async def reset_after_delay():
        await asyncio.sleep(delay)
        self.reset_component()

    def done_callback(result=None, error=None):
        # The outcome of the delayed reset is intentionally ignored.
        pass

    run_asynchronously(done_callback, reset_after_delay)
Like `reset_prop_delayed` but resets all component props.
reset_component_delayed
python
hyperdiv/hyperdiv
hyperdiv/component_base.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/component_base.py
Apache-2.0
def global_state(klass):
    """
    `global_state` is a decorator that can be used to define a state
    component class such that all instances of that class share the
    same underlying state. This can be handy when a state component is
    used by many functions, and you want to avoid explicitly passing
    the state component into all those functions.

    This decorator can be used on subclasses of @component(BaseState)
    and @component(task).

    ```py-nodemo
    @hd.global_state
    class MyState(hd.BaseState):
        count = hd.Prop(hd.Int, 0)

    def increment():
        state = MyState()
        if hd.button("Increment").clicked:
            state.count += 1

    def display():
        state = MyState()
        hd.text(state.count)

    def main():
        increment()
        display()
    ```

    In this example, both `MyState()` instances share the same
    state. So when the increment button in the `increment` component
    is clicked, the count label displayed by the `display` component
    is updated.

    ## Use with `task`

    The `global_state` decorator can also be used on a subclass of
    @component(task) to make a task global.

    ```py-nodemo
    @hd.global_state
    class UsersTask(hd.task):
        def run(self):
            super().run(sql, "select * from Users")

    def users_list():
        task = UsersTask()
        task.run()
        if task.result:
            for u in task.result:
                with hd.scope(u.user_id):
                    hd.text(u.name)

    def reload_button():
        task = UsersTask()
        if hd.button("Reload").clicked:
            task.clear()

    def main():
        users_list()
        reload_button()
    ```

    In this example, both instances of `UsersTask()` share the same
    task state. When the `Reload` button in `reload_button` is
    clicked, the task re-runs and the users list in `users_list` is
    re-rendered.
    """
    global global_key_id

    if not issubclass(klass, BaseState):
        raise ValueError("You cannot use `@global_state` with this class.")

    # All instances of the decorated class share one fixed state key,
    # which is what makes the state global. Each decorated class gets
    # its own unique key.
    key_id = global_key_id
    global_key_id = key_id + 1
    klass._key = f"global-state-{key_id}"

    return klass
`global_state` is a decorator that can be be used to define a state component class such that all instances of that class share the same underlying state. This can be handy when a state component is used by many functions, and you want to avoid explicitly passing the state component into all those functions. This decorator can be used on subclasses of @component(BaseState) and @component(task). ```py-nodemo @hd.global_state class MyState(hd.BaseState): count = hd.Prop(hd.Int, 0) def increment(): state = MyState() if hd.button("Increment").clicked: state.count += 1 def display(): state = MyState() hd.text(state.count) def main(): increment() display() ``` In this example, both `MyState()` instances share the same state. So when the increment button in the `increment` component is clicked, the count label displayed by the `display` component is updated. ## Use with `task` The `global_state` decorator can also be used on a subclass of @component(task) to make a task global. ```py-nodemo @hd.global_state class UsersTask(hd.task): def run(self): super().run(sql, "select * from Users") def users_list(): task = UsersTask() task.run() if task.result: for u in task.result: with hd.scope(u.user_id): hd.text(u.name) def reload_button(): task = UsersTask() if hd.button("Reload").clicked: task.clear() def main(): users_list() reload_button() ``` In this example, both instances of `UsersTask()` share the same task state. When the `Reload` button in `reload_button` is clicked, the task re-runs and the users list in `users_list` is re-rendered.
global_state
python
hyperdiv/hyperdiv
hyperdiv/global_state.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/global_state.py
Apache-2.0
def index_page(
    title="Hyperdiv",
    description=None,
    keywords=None,
    url=None,
    image=None,
    twitter_card_type="summary_large_image",
    favicon="/hd-logo-white.svg",
    favicon_16=None,
    favicon_32=None,
    apple_touch_icon=None,
    css_assets=(),
    js_assets=(),
    raw_head_content="",
):
    """
    This function generates the app's HTML index page that is served
    to the browser when users load the app's URL.

    It generates SEO meta tags as well as Twitter (`twitter:`) and
    Meta OpenGraph (`og:`) tags, so Twitter/Meta will generate
    nice-looking preview cards when the app is shared on these
    platforms.

    Custom Javascript and CSS assets can also be added to the index
    page. More on this below.

    Passing `title`, `description`, `url`, `favicon`, and `image`,
    should be enough to generate a useful set of meta tags.

    This function's only use is to pass its return value into the
    `index_page` parameter of Hyperdiv's @component(run) function.

    ## Parameters

    * `title`: The title of the app.

    * `description`: A short one-line description of the app.

    * `keywords`: An iterable of short keywords describing the app, or
      a comma-separated string of keywords.

    * `url`: The external full URL of the app, for example
      `"https://my-app.foo.com"`.

    * `image`: A full URL to an image that should be included in
      previews when sharing the app on social media. For example
      `"https://my-app.foo.com/my-app-image.png"`.

    * `twitter_card_type`: One of `"summary"` or
      `"summary_large_image"`. The former causes Twitter to render a
      smaller card with the image to the left of the
      title/description, when the app's URL is shared on Twitter. The
      latter causes a larger card to be rendered, where the image is
      prominently displayed above the title/description.

    * `favicon`: A URL pointing to a favicon. Can be a local URL like
      `"/assets/favicon.png"`. The favicon is an icon displayed next
      to the title in browser tab headers.

    * `favicon_16`: A URL pointing to the 16x16px version of the
      favicon.

    * `favicon_32`: A URL pointing to the 32x32px version of the
      favicon.

    * `apple_touch_icon`: A URL pointing to the Apple touch icon. This
      is a favicon specifically used by Apple software in certain
      situations. A recommended size is 180x180px. If this isn't
      specified, the favicon will be used.

    * `css_assets`: Custom CSS assets to load into the index page.

    * `js_assets`: Custom Javascript assets to load into the index
      page.

    * `raw_head_content`: A string of arbitrary content to add to the
      `<head>` tag of the generated index page.

    ## Loading Custom Assets

    The `css_assets`, `js_assets`, and `raw_head_content` parameters
    can be used to load custom local or remote assets into the index
    page.

    ```py-nodemo
    hd.run(main, index_page=hd.index_page(
        js_assets=[
            # A local Javascript asset:
            "/assets/my-script.js",
            # A remote Javascript asset:
            "https://foo.com/remote-script.js"
        ],
        css_assets=[
            # A local CSS asset
            "/assets/my-styles.css",
            "https://foo.com/remote-styles.css",
        ]
    ))
    ```

    Hyperdiv will generate basic `<script>` and `<link>` tags to load
    these scripts.

    ### Custom Attributes

    Instead of a string, you can pass a dictionary mapping attributes
    to values. This can be useful when you want to add extra
    attributes that Hyperdiv does not add by default:

    ```py-nodemo
    hd.run(main, index_page=hd.index_page(
        js_assets=[dict(
            defer=True,
            src="https://foo.com/remote-script.js?x=1",
        )]
    ))
    ```

    The code above will generate the tag `<script defer
    src="https://foo.com/remote-script.js?x=1"></script>`.

    When you use a dictionary, Hyperdiv will not set any attributes
    implicitly. For example to properly load a CSS asset, you should
    set the `rel` attribute:

    ```py-nodemo
    hd.run(main, index_page=hd.index_page(
        js_assets=[dict(
            rel="stylesheet",
            href="/assets/custom-styles.css",
        )]
    ))
    ```

    ### Raw Head Content

    If the options above don't fit your use case, you can use the
    `raw_head_content` argument to cause Hyperdiv to insert a string
    into the page's `<head>` tag:

    ```py-nodemo
    hd.run(main, index_page=hd.index_page(
        raw_head_content=(
            '''
            <link rel="stylesheet" href="/assets/my-styles.css" />
            <script defer src="https://foo.com/remote-script.js"></script>
            <script>
              console.log("Hello world!")
            </script>
            '''
        )
    ))
    ```

    Note that Hyperdiv will re-indent this string to try to match the
    indentation of the index page.

    Hyperdiv inserts custom head content in this order:

    1. The tags generated by `css_assets`, in the order they are
       specified, if any.

    2. Followed by the tags generated by `js_assets`, in the order
       they are specified, if any.

    3. Followed by the content specified by `raw_head_content`.
    """
    head_tags = []
    # CSS assets come first. A dict entry carries explicit attributes;
    # a string entry is turned into a standard <link> tag.
    for css_tag in css_assets:
        if isinstance(css_tag, dict):
            head_tags.append(css_tag_from_dict(css_tag))
        else:
            head_tags.append(css_tag_from_url(css_tag))
    # JS assets follow, with the same dict-vs-string handling.
    for js_tag in js_assets:
        if isinstance(js_tag, dict):
            head_tags.append(js_tag_from_dict(js_tag))
        else:
            head_tags.append(js_tag_from_url(js_tag))
    # Finally, any raw head content is appended after the generated
    # tags.
    if head_tags:
        raw_head_content = "\n".join(head_tags + [dedent(raw_head_content).strip()])
    template_contents = index_page_template(
        title=title,
        description=description,
        keywords=keywords,
        url=url,
        image=image,
        twitter_card_type=twitter_card_type,
        favicon=favicon,
        favicon_16=favicon_16,
        favicon_32=favicon_32,
        apple_touch_icon=apple_touch_icon,
        raw_head_content=raw_head_content,
    )
    # Render the page shell with empty body/style; the app content is
    # injected on the client.
    template = Template(template_contents)
    return template.render(body="", style="")
This function generates the app's HTML index page that is served to the browser when users load the app's URL. It generates SEO meta tags as well as Twitter (`twitter:`) and Meta OpenGraph (`og:`) tags, so Twitter/Meta will generate nice-looking preview cards when the app is shared on these platforms. Custom Javascript and CSS assets can also be added to the index page. More on this below. Passing `title`, `description`, `url`, `favicon`, and `image`, should be enough to generate a useful set of meta tags. This function's only use is to pass its return value into the `index_page` parameter of Hyperdiv's @component(run) function. ## Parameters * `title`: The title of the app. * `description`: A short one-line description of the app. * `keywords`: An iterable of short keywords describing the app, or a comma-separated string of keywords. * `url`: The external full URL of the app, for example `"https://my-app.foo.com"`. * `image`: A full URL to an image that should be included in previews when sharing the app on social media. For example `"https://my-app.foo.com/my-app-image.png"`. * `twitter_card_type`: One of `"summary"` or `"summary_large_image"`. The former causes Twitter to render a smaller card with the image to the left of the title/description, when the app's URL is shared on Twitter. The latter causes a larger card to be rendered, where the image is prominently displayed above the title/description. * `favicon`: A URL pointing to a favicon. Can be a local URL like `"/assets/favicon.png"`. The favicon is an icon displayed next to the title in browser tab headers. * `favicon_16`: A URL pointing to the 16x16px version of the favicon. * `favicon_32`: A URL pointing to the 32x32px version of the favicon. * `apple_touch_icon`: A URL pointing to the Apple touch icon. This is a favicon specifically used by Apple software in certain situations. A recommended size is 180x180px. If this isn't specified, the favicon will be used. 
* `css_assets`: Custom CSS assets to load into the index page. * `js_assets`: Custom Javascript assets to load into the index page. * `raw_head_content`: A string of arbitrary content to add to the `<head>` tag of the generated index page. ## Loading Custom Assets The `css_assets`, `js_assets`, and `raw_head_content` parameters can be used to load custom local or remote assets into the index page. ```py-nodemo hd.run(main, index_page=hd.index_page( js_assets=[ # A local Javascript asset: "/assets/my-script.js", # A remote Javascript asset: "https://foo.com/remote-script.js" ], css_assets=[ # A local CSS asset "/assets/my-styles.css", "https//foo.com/remote-styles.css", ] )) ``` Hyperdiv will generate basic `<script>` and `<link>` tags to load these scripts. ### Custom Attributes Instead of a string, you can pass a dictionary mapping attributes to values. This can be useful when you want to add extra attributes that Hyperdiv does not add by default: ```py-nodemo hd.run(main, index_page=hd.index_page( js_assets=[dict( defer=True src="https://foo.com/remote-script.js?x=1", )] )) ``` The code above will generate the tag `<script defer src="https://foo.com/remote-script.js?x=1"></script>`. When you use a dictionary, Hyperdiv will not set any attributes implicitly. 
For example to properly load a CSS asset, you should set the `rel` attribute: ```py-nodemo hd.run(main, index_page=hd.index_page( js_assets=[dict( rel="stylesheet", href="/assets/custom-styles.css", )] )) ``` ### Raw Head Content If the options above don't fit your use case, you can use the `raw_head_content` argument to cause Hyperdiv to insert a string into the page's `<head>` tag: ```py-nodemo hd.run(main, index_page=hd.index_page( raw_head_content=( ''' <link rel="stylesheet" href="/assets/my-styles.css" /> <script defer src="https://foo.com/remote-script.js"></script> <script> console.log("Hello world!") </script> ''' ) )) ``` Note that Hyperdiv will re-indent this string to try to match the indentation of the index page. Hyperdiv inserts custom head content in this order: 1. The tags generated by `css_assets`, in the order they are specified, if any. 2. Followed by the tags generated by `js_assets`, in the order they are specified, if any. 3. Followed The content specified by `raw_head_content`.
index_page
python
hyperdiv/hyperdiv
hyperdiv/index_page.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/index_page.py
Apache-2.0
def run(app_function, task_threads=10, executor=None, index_page=None):
    """
    The entrypoint into Hyperdiv. When calling `run(app_function)`,
    Hyperdiv will start a web server ready to serve the app defined by
    `app_function`, on a local port. A user can connect a web browser
    to this port to interact with the app.

    The call to `run` will block until Hyperdiv exits. Hyperdiv
    listens for signals SIGINT and SIGTERM and will cleanly exit the
    web server when receiving one of those signals. For example,
    pressing Ctrl-C in the terminal where Hyperdiv is running will
    cause Hyperdiv to exit.

    Parameters:

    * `app_function`: The function implementing the Hyperdiv app.

    * `task_threads`: The number of threads to run in the internal
      [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html)
      used for running asynchronous @component(task) functions.

    * `executor`: A
      [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html)
      in which to run @component(task) functions. If this argument is
      non-`None`, `task_threads` will be ignored.

    * `index_page`: An index page generated with @component(index_page).

    Note: the listening port is not a parameter. It is determined by
    `get_port()`; by default, the port is `8888`, and it can be
    overridden with the `HD_PORT` environment variable.
    """
    port = get_port()
    task_runtime = TaskRuntime(task_threads, executor=executor)
    server = Server(
        port,
        app_function,
        task_runtime,
        index_page=index_page or create_index_page(),
    )
    try:
        server.listen()
    except Exception as e:
        # Binding the port failed (e.g. already in use). Tell the user
        # how to pick another port, clean up, and exit with an error.
        print(f"Failed to start on port {server.port}. {e}")
        print(
            "Try using a different port:",
            colored(f"HD_PORT=[port] python {sys.argv[0]}", "blue"),
        )
        task_runtime.shutdown()
        sys.exit(1)
    if PRODUCTION_LOCAL:
        open_browser(server.port)
    # Blocks until the server exits.
    server.start()
    # At this point, the server has shut down.
    task_runtime.shutdown()
The entrypoint into Hyperdiv. When calling `run(app_function)`, Hyperdiv will start a web server ready to serve the app defined by `app_function`, on a local port. A user can connect a web browser to this port to interact with the app. The call to `run` will block until Hyperdiv exits. Hyperdiv listens for signals SIGINT and SIGTERM and will cleanly exit the web server when receiving one of those signals. For example, pressing Ctrl-C in the terminal where Hyperdiv is running will cause Hyperdiv to exit. Parameters: * `app_function`: The function implementing the Hyperdiv app. * `task_threads`: The number of threads to run in the internal [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html) used for running asynchronous @component(task) functions. * `executor`: A [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html) in which to run @component(task) functions. If this argument is non-`None`, `task_threads` will be ignored. * `index_page`: An index page generated with @component(index_page). * `port`: The port on which to start the web server. By default, the port is `8888`. Alternatively, the port can be set with the `HD_PORT` environment variable.
run
python
hyperdiv/hyperdiv
hyperdiv/main.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/main.py
Apache-2.0
def render(self):
    """
    The JSON-rendered form of the plugin that is sent to the browser.
    """
    klass = type(self)
    # A plugin class may override its registered name via a `_name`
    # class attribute; otherwise the class name is used.
    plugin_name = getattr(klass, "_name", None) or klass.__name__
    plugin_config = PluginAssetsCollector.plugin_assets.get(plugin_name, {})
    assets_root = plugin_config.get("assets_root")
    assets_paths = plugin_config.get("assets", [])
    output = super().render()
    if assets_root:
        output["assetsRoot"] = f"{PLUGINS_PREFIX}/{plugin_name}"
    output["assets"] = []
    for asset_type, asset_path in assets_paths:
        if asset_type in ("css", "js") or is_url(asset_path):
            # `css`/`js` entries and absolute URLs pass through
            # unchanged.
            output["assets"].append((asset_type, asset_path))
        else:
            # Local asset paths are rewritten to be served from the
            # plugin's asset route.
            output["assets"].append(
                (asset_type, f"{PLUGINS_PREFIX}/{plugin_name}/{asset_path}")
            )
    return output
The JSON-rendered form of the plugin that is sent to the browser.
render
python
hyperdiv/hyperdiv
hyperdiv/plugin.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/plugin.py
Apache-2.0
def __get__(self, component, objtype):
    """
    Descriptor protocol hook, called when the prop attribute is read.

    When accessed on the class itself (`component is None`), the prop
    descriptor object is returned. When accessed on an instance, the
    prop's current value is looked up via `StateAccessFrame`.
    """
    if component is None:
        return self
    frame = StateAccessFrame.current()
    return frame.get_state(component._key, self.name)
Called when the prop attribute is read.
__get__
python
hyperdiv/hyperdiv
hyperdiv/prop.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/prop.py
Apache-2.0
def __init__(self, key, prop):
    """`key` is the key of the component to which this prop is
    attached. The other args come from `Prop`. See `create()` below.
    """
    self.key = key
    self.prop = prop
    self.name = prop.name
    # NOTE(review): this initial `ui_name` assignment is always
    # overwritten by the ui-name computation at the end of this method.
    self.ui_name = prop.ui_name
    self.camlcase_ui_name = prop.camlcase_ui_name
    self.prop_type = prop.prop_type
    self.default_value = prop.default_value
    self.backend_immutable = prop.backend_immutable
    self.internal = prop.internal
    # The value of the prop. This value starts off as Unset and is
    # set in init() when the component is first instantiated.
    self.value = StoredProp.Unset
    # Whether this prop has been mutated.
    self.mutated = False
    # The value this prop would have if it hadn't been mutated (or
    # the value it has now, if it hasn't yet been mutated). This
    # attribute starts off as Unset and is set in init() when the
    # component is first instantiated.
    self.init_value = StoredProp.Unset
    # Whether this prop is a resettable event prop like `clicked`
    self.is_event_prop = isinstance(prop.prop_type, Event)
    if self.is_event_prop:
        # Event props are always marked internal, overriding the value
        # copied from `prop` above.
        self.internal = True
    # Whether this is a CSS/style prop, which will get translated
    # to CSS instead of a component attribute.
    self.is_css_prop = isinstance(prop.prop_type, CSS)
    # The name that is shipped to the UI. Some shoelace attributes
    # are named `type` and `open` which are Python
    # keywords/built-ins. In that case, they'll be named
    # `item_type`, `opened` etc. on the Python side.
    if prop.ui_name:
        self.ui_name = prop.ui_name
    elif self.camlcase_ui_name:
        self.ui_name = to_caml_case(prop.name)
    else:
        self.ui_name = prop.name
`key` is the key of the component to which this prop is attached. The other args come from `Prop`. See `create()` below.
__init__
python
hyperdiv/hyperdiv
hyperdiv/prop.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/prop.py
Apache-2.0
def dict_factory(cursor, row):
    """
    Row factory that returns dicts mapping column name to column
    value, as opposed to the default factory, which returns tuples.
    """
    # Each entry of cursor.description is a per-column tuple whose
    # first element is the column name.
    return {desc[0]: row[idx] for idx, desc in enumerate(cursor.description)}
Row factory that returns dicts mapping column name to column value, as opposed to the default factory, which returns tuples.
dict_factory
python
hyperdiv/hyperdiv
hyperdiv/sqlite.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/sqlite.py
Apache-2.0
def migrate(db_path, migrations):
    """A migration is a function that takes a cursor and uses it to
    modify the db in some way. `migrations` is a list of such
    functions.

    Applied migrations are tracked in the `_Migration` table, so only
    migrations appended to the list since the last run are applied.
    """
    with sqlite_tx(db_path) as (_, cursor):
        # If the _Migration table doesn't exist, create it.
        try:
            cursor.execute("select * from _Migration")
            table_exists = True
        except sqlite3.OperationalError as e:
            # Only a missing table is expected here; any other
            # operational error is re-raised.
            if "no such table" in str(e):
                table_exists = False
            else:
                raise
        if not table_exists:
            cursor.execute("create table _Migration (migration_id integer not null)")
            cursor.execute("insert into _Migration (migration_id) values (?)", (0,))
        # The migration_id indicates the number/position in the
        # migrations list of the most recent migration applied. We
        # apply the rest of the migrations in the list, if any.
        cursor.execute("select migration_id from _Migration")
        rows = cursor.fetchall()
        migration_id = rows[0]["migration_id"]
        if len(migrations) < migration_id:
            raise Exception("The migration list got smaller.")
        migrations_to_apply = migrations[migration_id:]
        print(f"Applying {len(migrations_to_apply)} migrations.")
        for migration in migrations_to_apply:
            migration(cursor)
            # Advance the applied-migration counter for each migration
            # applied.
            cursor.execute("update _Migration set migration_id = migration_id + 1")
A migration is a function that takes a cursor and uses it to modify the db in some way. `migrations` is a list of such functions.
migrate
python
hyperdiv/hyperdiv
hyperdiv/sqlite.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/sqlite.py
Apache-2.0
def code(code_block, language="python", **kwargs):
    """
    Renders the given code block by calling @component(markdown) with
    a fenced code block. `**kwargs` are passed down to
    @component(markdown).

    `language` can be the short name of any lexer supported by
    [Pygments](https://pygments.org/docs/lexers/).

    ```py
    hd.code(
        '''
        def f(x, y):
            return x + y
        '''
    )
    ```

    ```py
    hd.code(
        '''
        async function hello() {
          const a = await f("foo");
          const b = await g("bar");
          return a + b;
        }
        ''',
        language="javascript"
    )
    ```
    """
    # Strip common leading indentation so triple-quoted literals can
    # be written indented in user code.
    dedented = dedent(code_block)
    markdown(f"```{language}\n{dedented}\n```", **kwargs)
Calls @component(markdown) to render the given code block. `**kwargs` are passed down to @component(markdown). `language` can be the short name of any lexer supported by [Pygments](https://pygments.org/docs/lexers/). ```py hd.code( ''' def f(x, y): return x + y ''' ) ``` ```py hd.code( ''' async function hello() { const a = await f("foo"); const b = await g("bar"); return a + b; } ''', language="javascript" ) ```
code
python
hyperdiv/hyperdiv
hyperdiv/components/code.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/code.py
Apache-2.0
def __init__(self, disabled=False, gap=1, **kwargs):
    """
    If `disabled` is `True`, all the form inputs will be rendered
    disabled, overriding the individual `disabled` kwargs passed to
    each input. If you mutated the `disabled` prop on any of the form
    inputs, that mutated value will take precedence.

    The rest of the kwargs are passed to the `box` superclass.
    """
    super().__init__(gap=gap, **kwargs)
    self.disabled = disabled
    # The form controls added to this form so far.
    self.form_controls = []
    # Validation status; presumably flipped to False elsewhere when a
    # control fails validation -- confirm against the rest of the
    # class.
    self.is_valid = True
    # Names of the controls added to the form so far.
    self.names = set()
    if self._submit_clicked:
        self._being_submitted = True
If `disabled` is `True`, all the form inputs will be rendered disabled, overriding the individual `disabled` kwargs passed to each input. If you mutated the `disabled` prop on any of the form inputs, that mutated value will take precedence. The rest of the kwargs are passed to the `box` superclass.
__init__
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def checkbox(self, *label, wrapper_style=None, **kwargs):
    """
    Adds a @component(checkbox) component to the form.

    The `wrapper_style` argument can be a @component(style) instance
    controlling the style of the internal container that wraps the form
    input together with its validation error message. The `**kwargs` are
    passed on to the @component(checkbox) constructor.
    """
    # `checkbox` here resolves to the module-level component, not this method.
    control = checkbox
    return self._form_control(control, *label, wrapper_style=wrapper_style, **kwargs)
Adds a @component(checkbox) component to the form. The `wrapper_style` argument can be a @component(style) instance to control the style of the internal container that wraps the form input + the validation error message. The `**kwargs` are passed on to the @component(checkbox) constructor.
checkbox
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def color_picker(self, wrapper_style=None, **kwargs):
    """
    Adds a @component(color_picker) component to the form.

    Color pickers render without a label, so `has_label=False` is passed
    to the form-control wrapper.
    """
    control = color_picker
    return self._form_control(
        control,
        has_label=False,
        wrapper_style=wrapper_style,
        **kwargs,
    )
Adds a @component(color_picker) component to the form.
color_picker
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def text_input(self, *label, wrapper_style=None, **kwargs):
    """
    Adds a @component(text_input) component to the form. `wrapper_style`
    and `**kwargs` are forwarded to the form-control wrapper and the
    component constructor, respectively.
    """
    # Module-level text_input component, not this method.
    control = text_input
    return self._form_control(control, *label, wrapper_style=wrapper_style, **kwargs)
Adds a @component(text_input) component to the form.
text_input
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def textarea(self, *label, wrapper_style=None, **kwargs):
    """
    Adds a @component(textarea) component to the form. `wrapper_style`
    and `**kwargs` are forwarded to the form-control wrapper and the
    component constructor, respectively.
    """
    # Module-level textarea component, not this method.
    control = textarea
    return self._form_control(control, *label, wrapper_style=wrapper_style, **kwargs)
Adds a @component(textarea) component to the form.
textarea
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def radio_group(self, *label, wrapper_style=None, **kwargs):
    """
    Adds a @component(radio_group) component to the form. `wrapper_style`
    and `**kwargs` are forwarded to the form-control wrapper and the
    component constructor, respectively.
    """
    # Module-level radio_group component, not this method.
    control = radio_group
    return self._form_control(control, *label, wrapper_style=wrapper_style, **kwargs)
Adds a @component(radio_group) component to the form.
radio_group
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def reset(self):
    """
    Programmatically reset the form by resetting each of its form
    controls to its initial value.

    The reset is skipped (with a warning) while the form is being
    submitted.
    """
    if self._being_submitted:
        # `Logger.warn` is a deprecated alias; use `warning` instead.
        logger.warning("Cannot reset a form while it is submitting.")
        return
    for fc in self.form_controls:
        fc.reset()
A way to programmatically reset the form.
reset
python
hyperdiv/hyperdiv
hyperdiv/components/form.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/form.py
Apache-2.0
def ui_read(target, command, args):
    """
    Invoke a UI read command.

    A read sends the command to the UI at most once: the returned
    @component(async_command) starts out in `running` state, and once it
    reaches `done` state, subsequent calls on later frames return the same
    value without re-sending the command. To trigger a re-read, call
    `clear()` on the returned object, which resets the running/done props
    and causes the read to be re-sent on the next frame.

    Returns a `(result, sent)` pair, where `sent` indicates whether this
    call actually dispatched the command to the UI.
    """
    result = async_command()
    # Dispatch only when the command is neither in flight nor completed.
    idle = not (result.running or result.done)
    if idle:
        UICommand.send(result._key, target, command, args)
        result.running = True
    return result, idle
Invoke a UI read command. A read will send the command to the UI and return a result object in `running` state. Calling this read again on subsequent frames will *not* re-send the read call to the UI. It will remain in `done` state and return the same value over and over. To trigger a re-read, you can call `clear()` on the returned `async_command` object, which resets the running/done props and causes the read to be sent again on the next frame.
ui_read
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def ui_write(target, command, args):
    """
    Invoke a UI write command.

    Unlike a read, a write sends the command to the UI on every call, so
    writes should be triggered by events like `clicked` rather than run on
    every frame.

    Returns an @component(async_command) that can be inspected to
    determine the status of the write. Note that overlapping calls share
    the same result object: invoking the same write again before the
    previous one finishes will update the object for the first (unfinished)
    call and then again for the second.
    """
    cmd_result = async_command()
    cmd_result.clear()
    UICommand.send(cmd_result._key, target, command, args)
    cmd_result.running = True
    return cmd_result
Invoke a UI write command. Unlike a read, a write call will send the command to the UI on every call. Intuitively, writes should not be called on every frame, but rather only in response to events like `clicked`. Writes still return a `async_command` object that can be inspected to determine the status of the write. However, note that if you immediately invoke the same write again, before the previous has finished, the `async_command` object will likely be updated by the 1st (unfinished) call, and then again by the 2nd.
ui_write
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def get_item(key):
    """Calls the browser's `window.localStorage.getItem(key)`."""
    cmd, dispatched = ui_read("localStorage", "getItem", [key])
    # Cache only when the read was freshly dispatched this frame.
    if dispatched:
        local_storage._cache_result(key, cmd)
    return cmd
Calls the browser's `window.localStorage.getItem(key)`.
get_item
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def has_item(key):
    """
    Tests if a key exists in the browser's localStorage.

    The returned @component(async_command)'s `result` prop is set to
    `True` if the given key exists in the browser's localStorage, and
    `False` otherwise.
    """
    cmd, dispatched = ui_read("localStorage", "hasItem", [key])
    # Cache only when the read was freshly dispatched this frame.
    if dispatched:
        local_storage._cache_result(key, cmd)
    return cmd
Tests if a key exists in the browser's localStorage. The returned @component(async_command)'s `result` prop is set to `True` if the given key exists in the browser's localStorage, or `False` otherwise.
has_item
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def set_item(key, value):
    """
    Calls the browser's `window.localStorage.setItem(key, value)`.

    Raises `ValueError` when `value` is not a string, since localStorage
    can only hold string values.
    """
    if not isinstance(value, str):
        raise ValueError("local_storage.set_item can only store strings.")
    write_result = ui_write("localStorage", "setItem", [key, value])
    # The stored value changed, so any cached read for this key is stale.
    local_storage._clear_cache_at_key(key)
    return write_result
Calls the browser's `window.localStorage.setItem(key, value)`.
set_item
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def remove_item(key):
    """Calls the browser's `window.localStorage.removeItem(key)`."""
    write_result = ui_write("localStorage", "removeItem", [key])
    # The key is gone, so any cached read for it is stale.
    local_storage._clear_cache_at_key(key)
    return write_result
Calls the browser's `window.localStorage.removeItem(key)`.
remove_item
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def clear():
    """
    Calls the browser's `window.localStorage.clear()`, removing all the
    keys from localStorage.
    """
    write_result = ui_write("localStorage", "clear", [])
    # Everything is gone, so drop the whole read cache.
    local_storage._clear_cache()
    return write_result
Calls the browser's `window.localStorage.clear()`, removing all the keys from localStorage.
clear
python
hyperdiv/hyperdiv
hyperdiv/components/local_storage.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/local_storage.py
Apache-2.0
def go(self, path, query_args="", hash_arg=""):
    """
    Change the browser location bar by simultaneously mutating all three
    props. For example, `go(path="/foo", hash_arg="bar")` sets the
    location to `"/foo#bar"`; since `query_args` defaults to `""`, any
    currently-set query args are cleared.

    This is the recommended way to programmatically mutate the location.
    Mutating an individual prop, say `location().path = "/foo"`, changes
    only that prop and leaves the others unchanged, which is probably not
    what you want.
    """
    self.path, self.query_args, self.hash_arg = path, query_args, hash_arg
Change the browser location bar by simultaneously mutating all three props. For example, `go(path="/foo", hash_arg="bar")` will set the location to `"/foo#bar"`. If `query_args` is currently set to a value, it will be set to `""`. If you need to programmatically mutate the location, this is the recommended way to do it. If instead you mutate individual props, say `location().path = "/foo"`, that will only change the path prop, and let the other props remain unchanged, which is probably not what you want.
go
python
hyperdiv/hyperdiv
hyperdiv/components/location.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/location.py
Apache-2.0
def to_string(self):
    """
    Returns the full location as a string suitable for pasting into the
    browser's location bar.
    """
    parts = [f"{self.protocol}//{self.host}{self.path}"]
    if self.query_args:
        parts.append(f"?{self.query_args}")
    if self.hash_arg:
        parts.append(f"#{self.hash_arg}")
    return "".join(parts)
Returns a string of the full location, suitable for pasting into the browser's location bar.
to_string
python
hyperdiv/hyperdiv
hyperdiv/components/location.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/location.py
Apache-2.0
def scope(scope_id):
    """
    Gives Hyperdiv extra uniqueness information when creating components
    in loops.

    Hyperdiv identifies each component by the line of code on which it is
    constructed, so constructing components on the same line across loop
    iterations raises an error:

    ```py
    for i in range(3):
        hd.button("Button", i)
    ```

    Wrapping the loop body in `hd.scope(i)`, where `i` uniquely
    identifies each iteration, fixes it by folding `i` into each
    component's identifier:

    ```py
    for i in range(3):
        with hd.scope(i):
            hd.button("Button", i)
    ```

    ## Choosing Good Scope IDs

    The loop iteration index is fine for data that does not change. For
    data that can be sorted, edited, or deleted, use an ID unique to each
    data item — otherwise a component's state (e.g. a checkbox's checked
    value) stays attached to the position, not the item, when the list is
    reordered. When rendering database records, wrap the loop body in a
    scope identified by each record's primary key:

    ```py-nodemo
    users = database.get_users()
    for user in users:
        with hd.scope(user.user_id):
            render_user(user)
    ```
    """

    @contextmanager
    def scoped():
        # Push the scope ID for the duration of the `with` body, and pop
        # it even if the body raises.
        frame = AppRunnerFrame.current()
        frame.push_scope(scope_id)
        try:
            yield
        finally:
            frame.pop_scope()

    return scoped()
When creating components in loops, you will encounter this error: ```py for i in range(3): hd.button("Button", i) ``` To fix it, you wrap the loop body in `hd.scope(i)`, where `i` uniquely identifies each iteration: ```py for i in range(3): with hd.scope(i): hd.button("Button", i) ``` The reason behind having to use `scope` is that Hyperdiv identifies each component uniquely based on the line number on which it is constructed, in the code. In the first example, all the buttons are constructed on the same line of code, so their identifiers clash and Hyperdiv raises an error. `hd.scope(i)` gives Hyperdiv extra "uniqueness" information to include in the identifier. In this case, `i` is unique for each loop iteration, allowing Hyperdiv to create unique identifiers for the three buttons. ## Choosing Good Scope IDs Using the loop iteration index, like in the example above, is fine for data that does not change. For data that can be sorted, edited, or deleted, we need to use an identifier that is unique to each data item. ```py state = hd.state(users=( ("Mary", False), ("Joe", False), ("Amy", False) )) for i, (name, selected) in enumerate(state.users): with hd.scope(i): with hd.hbox(): hd.text(name, width=10) hd.checkbox(checked=selected) with hd.hbox(gap=1): if hd.button("Reverse").clicked: state.users = tuple(reversed(state.users)) ``` In the example above, we render a list of users along with "selected" checkboxes associated with each user, in a loop wrapped in `scope(i)`, which is the iteration index. If you check the checkbox next to `Mary`, and then click `Reverse`, the list will be reversed but `Amy` will now be wrongly selected. This is because the checkbox identifier is derived from `hd.scope(i)`, and `i` remains the same regardless of how the list is sorted. 
To fix this, we associate a unique user ID with each user record, and use this ID as the scope ID: ```py state = hd.state(users=( (123, "Mary", False), (456, "Joe", False), (789, "Amy", False) )) for user_id, name, selected in state.users: with hd.scope(user_id): with hd.hbox(): hd.text(name, width=10) hd.checkbox(checked=selected) with hd.hbox(gap=1): if hd.button("Reverse").clicked: state.users = tuple(reversed(state.users)) ``` When working with databases, this is an easy guideline to follow. When rendering a list of database records, wrap the loop body in a scope identified by each record's primary key: ```py-nodemo users = database.get_users() for user in users: with hd.scope(user.user_id): render_user(user) ```
scope
python
hyperdiv/hyperdiv
hyperdiv/components/scope.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/scope.py
Apache-2.0
def __init__(
    self,
    *label,
    options=None,
    name=None,
    prefix_icon=None,
    clear_icon=None,
    expand_icon=None,
    **kwargs,
):
    """
    If `options` is given as an iterable of option labels, an
    @component(option) component is created automatically for each label,
    with its value derived from the label by replacing spaces with
    underscores. `prefix_icon`, `clear_icon`, and `expand_icon` name
    optional @component(icon)s rendered into the corresponding slots.
    """
    # Default the component name to the concatenated label text.
    super().__init__(
        *label,
        name=concat_text(label) if name is None else name,
        **kwargs,
    )
    with self:
        for opt_label in options or ():
            with scope(opt_label):
                option(opt_label, value=opt_label.replace(" ", "_"))
        if prefix_icon:
            icon(prefix_icon, slot=self.prefix)
        if clear_icon:
            icon(clear_icon, slot=self.clear_icon_slot)
        if expand_icon:
            icon(expand_icon, slot=self.expand_icon_slot)
If `options` is given as an iterable of option labels, @component(option) components will be automatically created for each given option.
__init__
python
hyperdiv/hyperdiv
hyperdiv/components/select_.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/select_.py
Apache-2.0
def run(self, fn, *args, **kwargs):
    """
    Run `fn(*args, **kwargs)` on a separate thread (or on the ioloop if
    the function is `async`).

    The call is a no-op if the task is already `running` or `done`; use
    `rerun` or `clear` to allow a finished task to run again. When the
    function finishes, its result (or error) is stored on the task, the
    `done` prop is set, and the `finished` event is triggered.
    """
    run_number = self._run_number

    def result_callback(result=None, error=None):
        # If the task was cleared/restarted while this run was in
        # flight, this result belongs to a stale run; drop it.
        if self._run_number != run_number:
            # `Logger.warn` is a deprecated alias; use `warning`.
            logger.warning(
                f"The task {fn}({args}, {kwargs}) was cleared/restarted "
                "before the previous run could finish."
            )
            return
        self.result = result
        self.error = error
        self.done = True
        self.running = False
        self.trigger_event("finished", True)

    if not self.running and not self.done:
        self.running = True
        run_asynchronously(result_callback, fn, *args, **kwargs)
Run `fn(*args, **kwargs)` on a separate thread (or ioloop if the function is `async`).
run
python
hyperdiv/hyperdiv
hyperdiv/components/task.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/task.py
Apache-2.0
def rerun(self, fn, *args, **kwargs):
    """
    Identical to `run`, except the task is `clear()`-ed first, so a
    `done` task is allowed to run again.
    """
    self.clear()
    self.run(fn, *args, **kwargs)
Just like `run` but calls `self.clear()` before running.
rerun
python
hyperdiv/hyperdiv
hyperdiv/components/task.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/task.py
Apache-2.0
def clear(self):
    """
    Resets the props of the task to initial values.

    If the task is `done`, clearing it will allow it to run again. If an
    instance of the task is running when `clear()` is called, that
    instance's eventual result is ignored, though it still runs to
    completion.

    Note that the `result` prop is not cleared, allowing the app to keep
    rendering the previous result until the `result` prop is updated
    with the data of the new run.
    """
    if self.running:
        # `Logger.warn` is a deprecated alias; use `warning`.
        logger.warning("Clearing running task.")
    # Bump the run counter so an in-flight run's callback recognizes
    # that it is stale and discards its result.
    self._run_number += 1
    super().clear()
Resets the props of the task to initial values. If the task is `done`, clearing it will allow it to run again. Note that if an instance of the task is running at the time `clear()` is called, that instance will be ignored, but it will still run to completion. Note that the `result` prop is not cleared, allowing the app to keep rendering the previous result until the `result` prop is updated with the data of the new run.
clear
python
hyperdiv/hyperdiv
hyperdiv/components/task.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/task.py
Apache-2.0
def __init__(self, *content, **kwargs):
    """
    The chunks passed in `*content` are joined by `" "` and used to
    initialize the `content` prop.

    ```py
    x = 2
    hd.text("I have", x, "chickens.")
    # is equivalent to
    hd.text(content=f"I have {x} chickens.")
    ```
    """
    # An explicit `content=` kwarg is only overridden when positional
    # content chunks were given, matching the documented equivalence.
    if content:
        kwargs = dict(kwargs, content=concat_text(content))
    super().__init__(**kwargs)
The chunks passed in `*content` will be joined by `" "` and used to initialize the `content` prop. ```py x = 2 hd.text("I have", x, "chickens.") # is equivalent to hd.text(content=f"I have {x} chickens.") ```
__init__
python
hyperdiv/hyperdiv
hyperdiv/components/text.py
https://github.com/hyperdiv/hyperdiv/blob/master/hyperdiv/components/text.py
Apache-2.0